[Binary artifact, not text: a POSIX tar (ustar) archive containing the directories var/home/core/zuul-output/ and var/home/core/zuul-output/logs/, plus the file var/home/core/zuul-output/logs/kubelet.log.gz — a gzip-compressed kubelet log collected as Zuul CI job output. The compressed payload is not recoverable as text and has been omitted.]
;ҐiFG/Y7<妶z|S>AS&E[}uJ#j%+HS¤I;Q'x6 s?)װPȀ unޜFoG_""{O;?oZ6gčGFG/7^[0X7klF[dSY ؊&80g~GDo %~g>8i+_z~!vwdBLcǮb ×@%wӚlyUW49Qw*<9y'> %$uWi7d&5G( ZʩWdQQ 2p-C 'Te >h>yxW:bF 'zj6`B; "'8}oHuzPf+|OqI{dv(:XߤTY W:ΡΰC3Te_iwk]Q*"ŐQW;iNZjٮ9ZF\KʀƆF?5.u2N?{Fr_2RuEbC!SbL<O̐$ISMصdNU :_Z>$5tG=_RavWA $2SV%b&dSqQi4c S0ٹީ J˺y -؜ʃI!l8.)c$<f\rcR2z ݼJYPŤ>(gi~|Di*ȳɽ%dUҒZɣW:3IG"6p6kt>x}/;'gջ_`نMݑrRkЌ`4l ]^iDGr!cqQקc`0eL4$RBڠjYϢK1N'ЫwJ2Tid,F!tbXX3vBV  pnkԌ;r6JĦк ^AA:zb`0I*蠝J3ZX@'%PLvx1?$4N{MtP+f`c36 2g ?9ۏq<.6?Q}N`DRIQ͓ ,i&4w #b1r(pU{i !u]qQEq#.xva:HjSJ8ZJC`!y!3#0-AÍZ<>pPw슇0Xg\v:KW4E\`Wƍv -~tб-ubhX up 4P+c]vYT_ D rOn| +/7H~n)KIaI$feyΜF` :fфۤ6ի 5ML&a|L.B eV1'Xr'j"QH' r]ƓїCy仇}:oZ"O$U4 V~ګ+w=l,|NTjf22 M0,EԚC=9Axa%0c Ke;TY(9Og>QeLNLUJ1r6KJ=xh4]ق^|cm+eI˒oiYBՍnRy)5%Y6T*qZlRGJ MQUV!'qzsK߾Bϐ<`I+r9,eJ, Œ%9&FL&(3w5zOfGyvs)}J\>}qq6hۭǟE> ݝ=wz{_Pcs|v8H>cJgZ3ToD𶳆3NpAV*sD rG`87H \pB$jn8!!r ¯d1(Nĉg>Gl E*(tʤ.Lprc wf#E ڋ [#g:>#FNGVWz3M>\n[[b}7(Ӫ`ͤ7TsOw<)O>xՄIqMRJzJ6{-5[:ώSxFeFky3p#?$s]~ڟ)Pz'aE1neOt额җ? nnJS:ŖPͿG 'MjO ]~N@]4OGjS^tMZ*~7ux $vLjnHIú^g{Mnwm{6эta͓Ɛ[aWym.-jU}QqꭚIkێ ^]4sPԕjZc.Ahm,9bݹ-svPE ۍul qQք[5&]uLޡpm79 Aچ{&M1!Xf 턷i& &ݮhn]|5=X2']O@ڕoߕz~5f'e©D4:8039+}gGRPKBfIFyވS+v2(ё4Fd&$o4QaZJ* sjp+zȁ!_T4Jbk 9A~`(accc#ғ&Yq%2W$RDJO*$`ѯ\x\f;w'(.ש;STG,N6X0 ^df6&aM.Ac]0F¤%:[e(gsća Ё4iRȅwϹL*&9 RaŴ9[-rfvW푒~`K}_OBJ_>ޜ{ܧ%|$EE]ær0cUJkPk$U(SKY&ћA؊a%^0b<{"k1Xb10Z6L @mp"+dFSA"%Jֻ(AcPin$֭<ޡWE)hPT9( v M`},쨎XNRV#Ñ۟kkFRQ(“6&[*0G@`%s rт%%\p)P̎O@6H5P4TVk(ĤOnQ<.|Bk~8Y/i8g{z-5=^#CUk)tS] "uFtJ`Z&1Iت 9 1В.6I{ %VGߩQ{Զ5h YQ 6.BO_gk&'h22~$F횫\\FjL`?&(6N 퍚_|ܩk:Ut6{?v?xsͻ甙z{q~+0K6aO&ᗧ`@퇦0|hn6C뒳n|q-9qd1nmb˭󏽟Ӏ;/ZvMp!\c/οkkKo9BEM՟Re$pk1#^@L8ҭ/X_}>M|Dj#oFNi|;*,{uѩ 6ccNy4!h#=aZz}lOs!`P:e&IHd4GXpH,Hrjr`N;r2*;:-Op2^)7;zFjm۸0m ;;SS D.%xyQ)xv<2EGUk97{,|^_z%>sO> * mwnlG"/px[{9Тݼzmۯ뭻t~6OP0Q󧽦(|gv=N=zoc 0n{`C3jdwzn݂5 u<`%|]Z˫iW`<)`ȃ2Q,B%`R4+nIcI=4^҄ 8 Jq1MIu2aeu*sNN;ΓL;g3J.Km[j F#MG* *g> 'dd$E0KΗ"g5.͐_z@3b.2s]sLW|%BczSykeC<>m~TDKJB+;ZdV A9XyZyro҃PSϴ&҄5 > 1[gX(>Ha9d)1ETKIaQ+bBG/(181'QIVgs.{g+*#g˫ˀŠr&Xd}|9Wr=o%tHoehۜ ;o$?][s7+S~z7#~QwU9UCVUbL :4fM"M2i+E@wFr b$;IIDw @R]qWI\&]%i+"ڹ$B. 
p@t]}߆kP3ЌTݠf>SQMյٶJ+H3oKA#.S u aeΠFӻA6 cL7Nx$0t;7yc[ЯʚP L,<-yF:H*0֢Ə?/_ߛc*QS:c6eR.gی{={4xuG%YJN&BUR$)N2hGzT8fTAxyq/$)'LO `mExz xWO)SSۍvAinSYqOhl^NX%0z4ghʋu\*pa3^޼ŹQ%vn:z;MZ1tc`\bҥVpJlZT7[▱ .|&>mp{6uyvPNо&d\e]5ڌ6su)_'oo~äv$g6*-INN.Mn3Ui aх71m:csvY"3IJɻ2!!d"K2bsY'ohXYA|#<'_Y(ekrXV(NveWhFP_6 r v^yE9Sp-/$jq4 .Xv'J$Hq;w'R=Ouz[ET{ɱ\} %ȗZrV:Afxjp.`9VNƣ~# =s9VN7j.s~7;jE E%ԖhQjc ]_Xhc܁^77a4ͷ=K):zv꼤Х@ҙ:Qg{ rw1]_UoqIwkXk(?#3N{yFMRPmgů~˸cS^l7yEkEtʮU.#@!)eܤ2~fYpOs!eV xrkglLkTcl#Ft~H/>tڤhZ+))NVNhWc MҪϨRj'ʸV̧\\,s\)ϊ[?Y&V nJ|%b<4z>Lwpqcg` ͅmoRJYo`o^Tf :3h(&Xhpo~g|%K")c?&:&R$qW0t8i?⓷ź8^GonS/~П&{e%ߩ;j.F~5vcT٢ /!w !hi%SMlɩ` sG3| C̬|ͬ,(̔Y9tjEV=&l EUH A2FNɍ5Q )1"PmfԍUqQis}]g;E0/,\tc}knVBڕ[0m,m#fΊ_~~[\6xzjp ]B'I9_7o(0uI*e!O v"0:RL݉pm'Il'Xb0} P& ,.*,Wg[{FEOMz*,.fcۭ?N?bdj'R?QOۋ5i?*,q:WŝOA8A ij7N\Qߵ¨fW^_}hO?O~?xy3jp>̖#)3a {WR}7s#`8xso60x -)%Ɨtf-aퟆش@Wwm{:a$V\ꪾjԭVDHjm8nR6&qOp1lM{/ ЉwzFW?;zOo_?~ocޟ۟^ŠdB"s7ko6oɞiho4Ule^Avk*UdfKnm@^on>W ǷQ?58R鏻Us.*Ʌ~3ET[Jqr{E}Z7#] fXo:~ ?M|G{H-{ R!IQk0}41j ),,W(*9R ,A#YH+x }HoқHd_VeZr~Ug{?;cن j]%c߆k0F!M>>m(oP闀$oUy΀:`^pKG *'7 {ס':Nu@S$VX40 Gћwk= L\𭕯l?[LJTlr+S3+S'XEP=v1ePyPx9AZ%!GkQJ-r ,8-&xW j0DpR=c6q{|X%/̦B$J}$d|\Ô"$SVnүV~@i~|[@8N夓[Uy4`#WLw,];w!=ЉZ !M* pD{!#a:0- 9rugǶÁ⵳iǶ^v^C 6#L2 MglBꊸSAh0.M(?\HCdhG_rRbd܃JiLfM&rCrsǶeG|Hか*oRРIBx [%e`EH$!o0Vgdg(xHL~z ",iP&LR#1s2VfMU5C̦%E//v~!IcA%:!IGNre<QD8efӎmvp.lv`j5r ~ġMEA\)B8*k;u ?&\@5Mp@+})M4%c:[:rHPB0H]yW$=09GV&<%wD#i{`Mr2HG.H%IaW z1"b(냉h5e0тFQ4`pHo팫aWϷHz+h qugx}6e#/y]߮'q;d^JDg4E=p  S| ZӘGI1x NW ǝQFGeT ;A Ut(J6q[RuHvax8UPg}uw"9E0WY~#f)"劊 9jcjڕ^ɢ e61JQ8xѥ{!'qX*3jU7*nZYB~_;kZf@ԩtY=$љTHXQ˃YQŨ%) 0XK}zQ@>=}ɯ8}a=Ы|!oo '9RW 2*M <$8=3ZQB\d|aTK yx˭ws ޳q$Wr]FCqصrA ~:SBR`U KP$6`[ ]` i# @QG^K(BS /,' &@%6zsܷd =!"=AZ3Paj))@nН p|ҋqm׷j k`1,I}$>nY[x'E#ͱu[S7V%2* nQ Vyt)"6v,Ɍ?zy^J9 ">J}A {+ph8.5W=X&] ?s׻(;,bd)]}n7𥎏7Ha3KrRdf=m’>{EOOF? _,>1oMIXI3U34CUV_=[h.TH?fX#%tU630wªyUY?LgZ4`xԯO^q5z<Ԇ .s&<^2fTo4׺(E!֟ό,} >uؽR٘>6t$&^o+Z]οjR ]*חp {6*4_QM~tx;X3;[V$ RScCd֣`ߌx7DՕa1_WL4R !;+Dl*7JξP#o A2#)ƀAyin#Eý`UHs\ #4]m`*#mڟن*pK9m_^g\4M46z%xHP\!\2JA q%%:SQL$HrKs3RReO9LifS;VH,hTbL}S? -zg՘IDk1hFs+%e15p6::[>Lpsͮ7g WVg';z) .&ouR(<]oiY|vo]FFh7WCjw| on\3 fZ$|FS59Za&iù6Mh_IÇYI?HJs˺tsjGY`[z=[HEycu4I_1tN/1|_|K7g`>T)E1mwMˎ>_%9 EiR*=6)}ap@NcmPSY}PKĩhѶUET)[7̋%u:”Pq]9W%3їJT*c-krajqbDw9NfBJ8P$0Aa2FX%STZ0*u*DcHj5t͡T+6^Ԗi CP ib CZgYodΫDg3M8XC%ǛE,bETIz4z48JLjR}+Bp~g;Iev. QAJ+HXG"\JlB OSc*5(K))3\K kY˴8:z RR(&<G'#LB{* p* XEdr!^"ei#ҜkOau_&mkS+RCTRJmɌ6>)1Dc2{h^FBDts_WN˩qkSV]Δ3G:eM_s%l۟h JDPOˇ9pі U`hi)ISwр|[njl(h~v!d< Nq@X-#TcQ9E(RemxqhWnsج2.7U{`} ;.oBz c~xIZ EU(W10]o{_L7Y" ?/~&T 0E7|.zppct6{넑Vu4WceW^OBe,&}Y[]F‰uXTV^k1bW%CrVQzƌNng>Mr^^1V+Iv12g,zֻ0ח$/3=/gٶiyu>$cN7%a,Dƻ,*^u?ÍqMRiYmAԫ4&[ eWɫqWɫq5hp^1j5SH'B$x:h:NS IGZ͇8%eB=`UnȄ:eQ 5NojLZ7~ɷWR.:T'_O$*AKձW%(U0DJkr2 dA;JP̮^#VS^OO]ήd+V#v*AIpfW]1Ӕ}c5Du6\6e.S,Rf06}V Oo+a4%K/;oon'Ħ`>wl:Kة-ΦSP2~=l#Ծз?ZiJ5h)°|['w+n&` AE-6V~ݛ#nϷ8\,80sSkUtKk_]+RTXKv2*թ+5"G$(U+UXNT6TU}%g]FvE`Ut@[wW|9lEѠg@ké\t n x <5kme wzՖVMXsqgDƒqM= Ųmm)DG+mBVRKU(- sD^#>v` ˩jGXb*q*M\/crpQ<{E3gSo0 +رBZmTGhƵfsXEF)PZjDY1xw3v]lb=8؊:XVbm!CBYe1#fs,IÜQXB#yhvhOy6DD?.өUof1ß?12~,{1IHGÿy 2fzgb InzBt\&7 rܠ+71HI cg(΅sa\;΅IK#0uPr,:˹r]Ks#9r+ HmmM2HHHiFC,@yԬ"L|,u:?eO\&3,'<*!v']J aúPB}(>PJ%ԇC %Aݲϻ}y+sȊB]:KeADZ`2c^hU8`/i  XIdeݷ;G#c ƌ' u'Ktc0R 89(%Ȥ|Wӭ%Ƚ+tbrIQ3+' 0Ϊļ0'%oE4gV<7t7=gJj~^%zE`.]]xOcl%]1V3Bn.gwS$[Vdy2.F)A4ᐉ(뽌9KD[3!Pk6Ӕ ^&dIA$VVz `)c)N3%r*Q9+T,l"($,\RԔp04?V.3V V8"ZHJ$)}F|q I_b,AMG v\1|Z6~r6 @[TC4yJ,!D)FGѕLnL5Tߏmk US+KgH_|OP~q׳-Y]}fU۽Hh`ڪ[Z 6hsW7S]_e%m4\tg#+'Sp{}6ZQ-yd3Pm@iK3jOn9,Z3` ަQKZYG!2J)b>HWXc,)AeLE!z_y.#xNB'8 3i)k͎ފ]| ?? 
~P͓}ZB'_Ds dt_a?PiIXRDLβ 2c'(J lJCY#Nq?w9} } t \'k2فGGc4]" Ђ(rY *yS𥳊ʶv5}"~ҕG9Y1x߃bn`9 u>quzi`6|PثtNܱHC^)BsF|}7]L 刣 v~ Hq&դt*tI%h ?)I'GH2IVЧ#a 8֕Qt &(9H(, .1M&7%>瘳ErNhDɛYSDJK` :ΣrIr3qݽ_PC%HPp9TD`tJ!9J Ru$\2+l[c (-w+o)GEA6F: @+۷ghx5󰰛7go榜wѓgbSZNT]<'Uxvp3s%յx}fY'gsrk9/\^ReJ{J-}l6{$~ؤ}`RKb(%2Pr,td*$Z4goYdnN&Wڧ!w) W 򃘥3¶?scr?WGߑsℍ$\ֲ3r+V(9ϚN'O̫O\[SnNN)@JJ(%h;O J |))!JW1/+Πo]9ķW43|zxg١p|/!oYU.7aUzd b$hI: 㙳!E8xA 4e;oщ,dN;?NZp`hPEoB6K-x5M7鄾ُD | j ٢uKcaoNecHiP>gb!IO:ף  +% *^ں(h(kIJ9Id-;7756QU!ULrVn'g`n4ϻոLnuPcW?y_k̋*߯:z3H\S\{w|xo+ؚZZxdVELz꽣jcc|Ox-Vw5] ,J5Q|jMvἉWN*D%a %[qrWMChm&,|}JbkWX1)uB'iқNO,xp䰆w(hv*f]u*o4 jQgKCHW5E7ͳS)L7>!]mqS.!xAjK %/>xۼ"k-CsĒsNw( Ƥd[J' .q|-OoTX;sTO1ilQ#i!ڐ S$_4c1&F)Y8—@jkQ8` -02,Ih@#D@mv)+U+y-Zw<<6 dIrk!.Y(dsa&y%$ >wA5]:M!/UDz)Bƃ=i)^JIV 8W(V܋<O1@6ֽRbIN/V 癷(FzZR@"l'A\bPQØmiCY' ٓ q{ݻz>;םђ1x Zk=E.TLxz".v~̒8'8/y=|YW>\@?{}|1Qgyv[8q30 $0D6&P>e1mk*NײQWF79>d&ѸB[0__nwKwx$GW=+YysДU\Q8=w ȗ䀘B/%3_,Տfģt$Uw/WGF蹔FHg5$Ze6ވq(k,57VGڵyy=?pF2؀(M264(F(B R  GیZejbwQ'{2(PV$p0/%>+NrxqAQt>HSAx6Cqq|ZNxZ~7l/l:~SGۋN^ivSc`9r<5w`ڟAj|$?%rZdb ܸw}ۅǧyڄb JӜ+s$ N;IF[nKU|UtpU|@U|U&uMD\%q9`2# <')lIEgAcuLG eZ)CP2b=6MVHKDYS1+e3(|pWD/.`yl[zޙkpE2mqswexxcvūSxWoF;steZ{D.%oLBG4@u1aYǩu^n0z٘惤+v˦'H^y0~:Uv󠕮wbl)ބ2En9Ԩ\ozuu,/ns>ц4NktHZ{Qyݼ^1= kj &cM ׬cgmI 9Zr;AfxUܖ{7?i[?eZ1cplJa^iGjd~rVk/;O,} *hgʢZ5wjl_l0 nC@GٷF.@Pܱ:ɲo$r{_|]z .m%ݜ^}Uzy 8$[\ q}4ܩz r=07QUs)Vt$ Rr""(քCW@dW/Q\%<- k~o޽Jl@H5|?CHj[OV+ݼ\ɝQ[&,vh64=oߙ^LZ'El^Xt&gt}zx S&H앥I@U(\Y}Pj>ӭJ&7r3/dn%9Hy l 6[Hq{[cokm5Tk:6Yos G4s%ʕS5 vOmd"ß]?@ j&(LyiU2%2JFV iAQ<]Z5P@oI*tNa2 a}J!й`:-RLp7pzcVUww O%i q䄋&TLhEN,b").:q^hŠqp^*!i9p8Ebm 3|t#d25* D%B:HsC:o7N?Op8. åh\doUD*GQ25)LKc5o .|2_7HȀ,E/H2N:8Qk#aQp#fQ[PjEc sڲ{}]fsָv|cPk܏9ܚ?ȹEq r/FMV4kT V0bҺV7qNdy)md xdͧ{@RnxJaVKEF1G)Duaa 7jؚs䬑?@: 1, ]_]qƽ WFpv.<[t/tD>@0hB́\ձbqjj /'ԜQC,~FwIa46#)S0qٚaL󈑝:ސFж+#ho6vGP;}ضC`bE;E4ܢ\|f7i4-uv65b%!#DG'O٠?*zؖ1fB0M912&h{f6O@fQ8rY04BĈH)h"n{ȩ(76r٠1„EGslݭU39 )9x 7R {_E4޵H~f+,c^1X3=Sf61(zIYEZmy+zH+FSxnC/Z͠nm=u1G Pk63(/ ÀEC~Ey,ʯ:<NcʠZEYYn773c j.PHx3:ݽԞ<#`HbaVFЛ?q|8j񙴃 ' :. 
3=gپil B=A=-Ӎ ]+3%W);Qpbg oG# ^C9RgP`+?u &]557wh-__-,[AwuJ/rjŘzMZR+]oY,GBװJpUm{WcI7jEb}.M49klW!A ´VzUCYEB*S GH4EKTr2HG@RID\z` rH# @>Pޤ 9#&ZH0<)c" #gk\0!UUO XZp`W_ass;,LYwi,gy`U,RfQIEjy#@C4*kc&P'h2:ڀm)b0h% jC4SMCc4>NEU\gG_m=e<ezEkُ#N}n&#ol2JOu6kVu unKm5Y٧U^Nj`"R"~DxJ&rєLꃯT*z9UL |j8ub܅&6k"PBnMnQ8x[– ?:rIƿlAcʸLQK)Kd(F%-HahC0LsIZg6)š ,5t;vE!ǛQn[Si#-<6MZ͍䍺ܝs8!DagORъ" ; G\V"lG"\mV0a#mR`tʨv#,)BS /,' &@%6zsܷسS1Z 0#; .MiIɻլ_-SOZ,MumrE4: Σ<24Ylp%2*dU[S&t)"6 v,iO3?]V,=yn+0@|$%RДT ;G#SN$K ~"zT|>>Ro_7dB;!^6!%lnKȳ̆i6vv=զG17Ot\ [ ?.n:oO}^Jvҭ۽,3%KLNiB%E K !'@' +ۜA؊9YJםXŧ9m[yGDJNcR@HaQiƝVc +".MRVdbkx=t=V{zML@S/֝D-C+a'0Mp%$-,=ϣ) Q0N$rsmM,;d)S 6ə &&`l)NsbQwZH>k0"%LpX[)ZDeuU]_% q}Bdxʼ wTJjQgO J ]^^o,JYEJ~tvj].&Αx:a>|Uo15y UɫżaI2sk>vz۹G^d\S}7n=-=)T~pUO!kaf^Xׯ`5޵=ٹ7 H^AuX5bG)5R+A|~Q(Hj_}o *@&~uzu7 ߀:zW/ӟo___]= uWo^=qU| GP`^}]ck*K2稛 ߡ_syMhovt˭o&KȀ :H-]Fhњx#5GF蹔FHg5$Ze6ވ6!(%PaYjn 'sZWV1l$ ҄A P*Ki'8z3T&1:)Xhi3,g:G2&{ϧs=zv Yt0N(i_J_tE u{&xKnӢ$J-0y!NjV)/eVd=EJ{c BT:+XFJaH1,Vh -ue5^ư s6ȹ [Ȱ# -J)~].n-kc3-i)G7?z>E RY+g&x2}:iMx~: %~#HFJTi[/z+ +$M<߫H1xy,%BU%Jsy ֔kV: !MwT}ba{6 [Ŧnr4;˙2RjճY"eti:Zcװg=Ɩ]}78kʉ INN 4*q e0m??UҒIT ժBlg:>/~ʨNZ)H3NBvtj;Cs+Wۯ/gfQ!M=ݎ=/^W0msmq-\YkZvZh@sJbHB:0IYOq7\iG钅K- xAƢc.䷄p[B,Z1X xaZfBB E={be0K"{F(@C +S`C«QjX8/'1_Fπ@ x Il]1*٠;8|^g:S]:jP8ѥFaL8wػ:YꈬNuV'J٠xq;g;y&B!n5UƒݩYjuL"G*Caj3֦2b=6MVHKDf3;;6c2Ck?:TbDEj)k/Qct>8D Zvm=?b&jmz' sC"OMK2R # l,;Vƥ eՇ iU32 hG]r1,X Ҙ0;aԷib<>|YfHO7I<4hP)oc` ñVrX2XEH$!/F+gd(QM%#1S, A94a#˜MWmA/ڂ_&%EY/^O@mbYWȝ1 uaq DGpT5rWi2j;Ѥ&R1Wffa*:O?{B3s e0G+n ~~_㸥Wq.13(pVnٙAB]³4 @d y@`*VЦ"p}zc'y:-i@óW?esQgS/'+EvоiQtmr2j{ /~ N{ h>p-_ ]2:wR>SLfzI)ѯۂJw2GtOwQSE#+w/Δq 3``V\ *8 f@yvyztyvDyvyC<ʠI̽I75A- AR" n&4dN!c\FY; ZDl 0CJ/h LjR .̍g)ٕ}߽KBʰ.^fѷ˞9" tsdрlSbR깘an|Ǜ9zzGҴ ;Zixv()""GPI)W.zIuNy&\)#8s&ؔT$z4Ft3ί!Hv,fBمw'rFF?ܗ ͤwjtlo>'-.u5Ժ[o&m!:m|sn͊ ,eiڝGyx;cs-7ð@}ٗy2lo!D#CF(T4Mw-~,)|N'w?')T7")Yv߼7/j5PAB)Rsx(J+=udޕJHc1F';HCl >r5꩏;*>t7ŤWOr,Y5 "kj5zlbA U$5L~1ˆ*Uxd!R&R/5eDDL 3`pH3^ /ֽmW"N}1{ѭΗWrCsH9,Rg2D'6ا7Cw8 l%" ,9vf'߯ؒl٣eeIv4Wb@4P2ZYV| u:J59Վ@-3>k}NE_;pjVOggoϱ;ǔuvwK=Բvv8z={r}?ٔ.p nrԱ.ڹ9r-'%qAC$TTPEeIcA C9Yɥ Ǜ5.Nyj9 }FiY`i즭z.iYZ%N~7mRa77gW,0gWU\'`NXJ-W"\Y kGy^E LY-`+w5{e䪒e*W+-unw$vZ6@n1̜ç E<rs : [/cdrw%~NL^z7rk Y4$LLA7?QDLNOdKh)!8FUQF QO柯ɣ/RHW|s>?3J-!8ǿ}~ǓzUe豞,2dzQϥ mj-f:G!SK>N,F:fQ^ѩeղ0g6SXlGw%Ҕt6"S,[K3 }pN:_v- {̙,; s*cʬ#)M"):A!k'zKx+yVƽսc{Nz 59ko?|l,o~zGzJ2NDWA.;o5xYQIBcBI ,1J9!9)fT`K*5j|VHLF6@0$GA %bNBAnK҇Y:-)V1ٗ17nc|0%9U,LlLi04qBlZ!A5 *׭sW-]o)Yծ^Z2C&MZ4RbV2]R->'] ډk`Gg* +@:!oRp}U53wdU}7ﬖ> lǽڭvN3w$gI;$5#C3XDy~ixqM-l2$RNJtRCIj7zB蟣NO:Ӫ]]`LKmiV.h䠛UhZa [Ca(SFGlL Bee,2 2iYY+5 u G0c&S]VXI#N}L}6EB-4 ϔ!ʚAƲz(-!US)xl:^MhҲ~۵hP#ȽoO߸B(&Sꏲl~@K2è흼C5N?~LA4*l"_?c-Dcs{m{֟G8|*SG:&(K:I$JN*L-5dB{>琳]N 11Q1Y*&p }$J ;GzcV>!FiZ68z7k$mI*lNCa.9JA)3f2 -cQ2+7GD([@AʶVO)El$^itѦ;G/q7|5mSCioơ rٹiMش,+j]Xo6K[Ͱc CVsgIqZ!Ǟ֓J;֯K#^Qy-6i&n\flyV5_R(Pb*t4K0"_R*1Z9e ,S-* >3r:HiM( R}#coP.7b Հ{̸#8؆92e懿8 f6-@;L٧"O>XD M*Z)/bkl:*ljə &_طE]\=]8y5b&'b jP{`ok`O C Jm%d_ lMT Ke^c9[ĨXEҰPIgX WSB1qd/BL& zy[~Yd|c_D4=#q@ĭhC ;QcF2VeڰkSPEERh7h1'IިRq&H`Nz=ie^e{F!:󫎌fi\MKE3.\ܦ1 O%f u)$E uT4K=.O{ӎ}{.lke6Ϸl4,O:۫-(ڬmُpُڂmkFe>C'ߧo'fi4HY0Lv. 
bC!p:Q O< R\>4H}iX+"l@*[ȓR%x]-'U4 U&}+V=M噱b@L9& 7Щ`T$ W&ΎiN~rJ9z]4-ۛ}ѪӦ'f&2^7-I3.6K$?2 utq$u"/p̚=kҖ1<)b!k'K2F(9HUYr:Q}&g;L楫2ofbavgy$0qG/2\n%j!qmLZSneRj8rPn{WP=64nngRa5$1qYM4 dN7Wr\CgÖ|' Rjg;QJf<Վ] P>*/@9l0N1J;DA`e͌n:o{M@\Aﮔ^gvzx$|wpvaC|/1oYfSIX)b2A+EHƎ`5:0Puy0 p`HCmΆF$z ZJZhU"g,e-% ~{|qӱ;vsӌ<]?oUnh-vۇY됈6 MGFzjAD(Jt2 NT<: =7$ O{[7syIr9aj*Td2d-OC<Uz$OրqV/gE 4I "T3hwF'jX$Qzpt`ZtPsB=>@򤯇lB7@VG -z[Wt,Fѩl,)ML%/}h +%*^L|05$Jyq^lYO\Fſ}V 7 }L'  jry3.m.Y_jtƏr^"@5qs &,2g#~aeY,,HdմG*[Z3z{ ~ ?NR"/k|f~X?Z==6\>Ɂ1 Pp^!+U.?Kٮtݴu78~bTPxiH0(X|]sWe *+GvbWRu!ϥiqM r*kAQx=!40ݍ_7ծ¿L;>ꈏ{`+w╫-''47{S*_>@ 3{Ih|vRCtKL[x=4gvmΥ5 Դ[<-:6~7mzLjZ3)&\n6V]|5/yl9\Խ ~_rJg4}D 4ɌH(Oc6³h$ JEQ$q,'ӎ~o/3SC@t r^RM] 琊{EAiN Yp:ZrB֥a y^@ߗ0_t%l6x5.sJ)U}GУRsQ*XI[5.ELLYT*lɩ`PvCLATBE&ʃ*3A~<&^H4(EDUH"F2j0%&%`  >M)pZFe)A@Y/5> L 6]N`ni(Xx>qa]LHz!]${k֣ær BQ"%EK !%@ٓbGm l0Χʶf[\TK;"E RrH@* J3K`h4IyTrvNJq߇Qu2я-;] ZxV9N`JH[ =ϣ) Q0z^\5"H!P/wRA:l`13-LL`l)Nsb) K  F(9(9gé<2ESHB4\hvMQ?&ևe&׌5CGm؏_lYcgɳ~,);y/5=ZJOh8N!IGiⷿbM~EY)C$.|0" }(C >-b@r RLݡNù^NgPͳo`pU P& xY\:$U\M_/aTޟ& Y}B[_XOwC,.LDk{WiG%KyfLɏN`kaiצVċM¿Oj“+UE16uQ7O5rQZ?yU=x>5,[1`gR}KΑb4aa&kYf7 >ƕ~jBOf&e$h$׍nSq%nT>O\z> 8ImC\kyS7t*'֝ F .΀utū7OW_9Dͫ+0 65$0DL¯w#@݁C݇40^;4UleY7 øqoh1neb˭wG`Rde{Xx],A\I]|ۯͩ:`Z*s_Eޥ@:x}[>틍u4U6Ww߯s)$XϨ5kI l6!(%PaYjn ΆV啕|áF2؀(M2 J#T0) GOq# xE'--?rө l#KCQu6%~3pFJTI?*!mVlPmQEYXoGCJq\\ljKe9. Hy ֔+p^ h?]Tÿ~e @w>~wix/o R̴C4Џ~ig Vc{pDnAS0eJBŔFQgYS2U\t🚏*n*VժiS @_0KL}Ķ]VЌT#_NgdoJv)A&xX{.0{{%a.k+KA(g$eCXfHQrȧz,XƈtBEB{ٛyiYR岤~ gr2M}41j ),,W(*9 (#YZAC47>&tCBij?1N`غoyH5!Ҹe '5t$Τs +qdJ)GǙ3Yq&d™8Q]HD so[0# AR" Y0(AWI2hFY; ZDl 0CJf Xvg]J-Vz|Vbno=n ɧ)'0{P:B-#Ue}' +^m'ʑ|1Gԋw$(MK.hQ9N4Q/;wvʤN7+$:<Gm M9Lif#H*= #LG aj3֦<2b=6MVHKDfoԺ[pQTceۣu{nI^roSpnمwz|1lRߺ[Яe0 mO*ń6Whէe:cݲ]IrFWsh{^j&ڧK\l(v=n!CM[:n2sYo`vm]kF:}~8OjC2Η-6,ۜSzMhӴ5O$ _O"ouѭ&L?ԹcSg+3}3Ϡ&w"qy2Tk> w#W\߿~q'j8_j*SnT0(eU9(&/ L‰?9zp\?5+|tU>HΫzS>xv6M[֗&_E^,Om&xsSJ7:.շUL㶬p~0jh_Td41 PǼ W@Wn,.ɂԋ ei9k$\+%VC3 dK!X:QlFQ\3%sȗ6DQ*A`vmlln]v6};sٸ'-lZ( FM@JqÚ SDFT9hG JHԃ1",DDꥦ0@<)cj 蝝8ƕ|q{E^^||G%Z٬nSIοpl))Bdzld YiD \E5g@%JWоSbN d4jP7 Qɬ;oMH6xWwR}hKrR=ʻl&A_@ii0;+ Y4p\vQ*Vw.=Or]3;m]gs %'3]^ =mH퐐3P}_3rNZ~|3N^pKG *'/ {ףuΡN!ԩ +})ǪA("zz-Gy #n5Uƒ,Yrg[CP8/~tt.ADEGA6U`1j(AkGj8SXU#s!,$SR[)r595TɊ~^Ieׅdς˨$Gd"̗s>Zyd=:8Q#*-[H؄ %cGSȜ$e^̕h-R&| .-&xW j0DPR5c6rk|X%.BY^ޫ. 
v6Ox 9-S7N/~ݠ4pXcc4 T5( 6NnV: H\1 I0k+ B2 @'j-6N0+@e) Ӂi]I#95;ڴ=}H}H$U$%1lMH`"TZq5,U (>\H☁ 3 t r#(XJ804&>Fv}XmF;c}5"ˬi{1_4hPoc`x+9 R#@; #Ɓ3uId$&=cu4(&LR#1s:^.rkOg^8m!tיKՋ"^d^ٻ6$W ~Ŕ!Xxܞqc {0myJ\S$mbFK,eݶ[dddVFd|H Yt$皺!Y#A\5JЉH[)Svzz\akX}([և8}xbŨq ]UNE?>S#ShobXs7rrp.^@΁%.x36Jdh5nFfsi,7CzEmةYDm9T$D#i{63dD)]:IJ"ױ+]Sy lxR1q`*e5m3g7e: 9"qtD0[3dv?0l^jC''}xCs^M9ZB2 ɯG0&a2ZoհDJ>p-_!aS̋D/{,a)8`lfM )1WD]g7s{F=ಊNþeE) #?_W[YVGb}ue5%jm<:-|8 oa濶lcFּ7)(B *L;}hyXiY'O s,yyy"]C` ZG"$Ԣc5KIHm W}n1:jK&@~&KR6 Mx;Y d,)"C A0pcf>1Vz  \ʶ6^zMv³UNR)ϓB`ʩh=E-c^}t13eQ9A-ONqM!(l)`LeR$MA}`Zol1*k$ SbRR쑖 r&LI8FҎY | )c4KOG[6L[c mzF㍹}rR`槣ąiHtS=А!Enx-k6C<`*EvJ/,2ıp{" -Ȟ$+`%A؊n0vlQwD@4:& 4Tfi%1 ti$>^ < *ł{L} ~na=JL@;Bv3ϤBңi<,=QC[ϲwC׭ښ;d)S 6ə ,LL&c ҵSM$_J@5RhRs&8I!RFx T"04 #m(>J8?DsXfrX>'4?x5Je0rf<뗂1S[^+bşPwpBzXnʏCBIZ1/yKES}C >ͪ]`@ 9d.k)(]4t " t^=I a /K Մ O*0!ħmd|Zxz]66)ZuSEBQR^.`1GVZXR/6 3}&U\-ͨ"jOژY>xjmaFRܚoaoʹz9+Αl82OJ|xbraHxcOo鶩 +Klfu|a8*`Wkh8'z۟H^A64VͩX4C-#Ep%,&.OC6YR=|QmD5ATXX>\|޽7W޼xzW߿0`Sh`!h><u׷5mu ]SZ9j6}a--l-B(2fO] tв5gJVޜ*KHlD-W\)NkB()@,% Y;Ѻ5 O*H(=IZD &dZ#,K 4AHOlU^^Hp8`#l@ P@J<ѓ@ሧ8I4*F Evg:UQkk 6=zه;lgc)lgYՅZ KiSUe 07j<[ 00 rbrX+v$vk769]EAǙC9bm?z~Fgs,̞`g< W>RK'Wg8'-;q5a9!U;GwNTy}>zv±Yy':xA"g{c c>r4I4,`E4\вLGCOI7‭iU"bK(pgqoR°TqfzGsH-B/.g)xOua_H!f<ƈjMq͎׭0fts٫JZeJyUgh!ѤtWfB]jUL?#LEFKMpyR>kJ`6DZr~Gq>-uqIs L 9,lL KIΊ]x= eiSeEa (@L6 m˓#x$/Ou*<[|)]vq]fUۯS$~Yv̯úEOq3OR^BۿU,ˠf"V"՘ݍK? dd觮>ᢊv3Vژ lJ T<^xqoC}9dA868 8}Ӈt9y?y&uxrS//>]nO%|>S74[\p52EG=c1棴T#T>)o|ןR84q¸SPM3|VWjW)#g4ززkMׁ}&(NxFyN#!7Z6Ai-Y'MʫxZ+K+q~_.8$->V/C ]D!B$GMku<'%c tFao]K-kwgl|aƜK-gb|^H 7#7+QPqzJ´)_&ZcW~"f^#pRK#}Ѽ?ųA6Ibϧ|7=hh;&We^F*wM*a+$C!wSA[c0QEe龧Wѡh彳uke 6"L:!2~_w݂ sYx[޶Pt'|LGL%~1^o˾>qӢk0q&ƅ n^2K޲ `Y,yGȒ7~mwZGt䫱3kx6YG)Zgl<8Ǟ71A]uХN>NXe|-z"'9eGHqa q0"O __u6JneZط&{/YuDP^)#%bd׆dSѤw& hmJ =xPvw> L} %ybN %um']:jB%Ѧ(8ۇ{҆T>&jn.1^^al7Drd4AC-YF}cc7]z~{M:uVßVuğ ӁL: ?~ѮŎwqQ٠)knBtJB];݉ol.l}:AQk@RG*M/vpd1*>[dp'O~"͵2q1>=d^2~菷Ag;;R?B%z8:"^4/ =mL>+ѫ АV*~CUA]q`v57#7iirJMʩMJRL1-d`)QS<0 +.H׈bw2)W$Wvˮ'Wxkr5)9\ E7\ #0Jyɕ Wծ]/%1#tWWl bńwW7'iπܣ̍u1)2{^7_1tXቑԁi}oM_?nUj>tJ8ĻzB[6v헎ny"fo-.;]W y͓x x^4/w"Aȭi3 inEoTC矷[6_o?'\9KY n{2{avp=ihqەlAFxL|,}yj \1xjLkTSGd<5DI%kYN`ɸJ"WLkDrŔU&)Wz "`xW bZs+t uMk;9*m>{M/MY>qm|Ȣg мIܹռwť9u=0xTuzJ$}?ޝ*> e xɓ!i#f~g:229^ 3H rwiZIa'Qc)NR IO48k']dzkm~,7#-+ՂCfHVN3Y^d_tvz;½mns`U֞ݧgzj|ؔtF^B1.%:cZGgLu&-BArEnp"WL_cJ['(WVy)MAr^#W J+2z]*W+RCIؖq]14ʣ]\X\K>+uȕ۲݉Bj0a2r5 G޷9v(]frUvz ɕJ#WΕ"WDk QzPU&(W;j}劁M9:,E+TU(WH)}Arpa{h_:rŔh\MP(o!S1z&ͩ OѠ7j㩘!F\A2; Zxe)QTLku[ڐ-"v\LWꡓ=NVGQ|ZvmA%bƦsN0uKDI!?b&v(gt JZg`_Nh@^0K/WLZC:A3̴. gT\9K]\r>W\1-&dZ}0HR` ʕ-Hˑ+նbڱ tի+e0`?AA G4G,9rBUvziCMR @Ԧ$gbjQq*{T<] kqÛcs rb\5UJq՘tSVjGq+UAr;v AnS(V\)/P䊀*g"qu1aLTr{"D(WhЮ̯8O=ռyRˍ+XL3S4Qg%LyiDSez26kr7ae]KoXc2dSem j"ˬ8X{^μޠ5l;x(b\J+\uȕu'O؍~"p y@\gFa RB^r%r.]OZ(HJ(FWC)rŴr+t\MP]0JQ\1ҥӎj^\MG=*Hؕ#WE)rEF)U(WBɒ䊁}9rE(\)rŴ.WL})ʕr-;dN=!ĩS!O7Jk߀vs5?"Tj׻2t.S 1^MA>[16w)>} ժb\+K+uB.WLi\MQ,N+WsT̀v;,H ؏^b.RdiQ.LLOQ5ky=) ,C'%֓`_`I!(`(0Z3Sj >kW^4|r(f}i.WLtzLIrEb1LDrEZTjr0$"`[fIL2m% SȕڲՉ0:0`?r \=r0Z)Fi3K!ȕrkK% +(+"Z\1en9y\Gj%28׌}e޻bJ\MA=z>WV>^Gb'+kfk}:;?-6ߣs g??4 tͽ;Od]_Ŗ5Y޽+Eg54Ÿd?;_8?]hӗFtWo q?ͯOq7Ἱ!cY]hP qużiK ?Fo]:tgw7|;lNqe>;գ/~cҫxjnujghܮ{h,LxS/FKx⩺zǚ?s)[q;E$$_(}c>w-aCw7Oȷu>){:=->/o ܦ&^ux"CmN[]$S1hhhlѽqAN',wK$"K{D{ޟ&?-P3n˘~`D%nA+)i B+ڃ@wZw.CZ.)}k%B:Dm@њ m zEFфn?n͖$xffbL#ԋH!<9x飵d,~ %/ŎnlE搴6Y(8Z'}JzI1IfDo{(C׭-tk^PC..>^Kɖ^.RXDR<),z$$D=I2-Eb–CF`n$k&םtoTAǖ<:a'}׆ k{ (ԜOQfU^܍S5ԺVDC*)IV:ѝ(sRB3 |11Lɇ%Jѻ)D$6)hK si_!'R:a@zi mDiivJVCJd4XLզlR\=D@QOI=i^,>YЇF.\>E=uk,R`榤gE¥: V@&Y)dWH! 
Q S( ٥fّ#Dȗ*1 rJg'`b |ZC:m\qLOJ$[PyU벫ԕA[4xmBV5 e8xѴ<@= KH`Ge YLZ[]x $nGfp6*YȩՏDC}^jygE3ʃ d-Š.k Hb"a9FZ]\ۧ}2di"YrB,GKе 2"(Qc0)G;=ŤuԆHT—Pw}CP=n;H HjLEQXRr,a3m>%TB;뒄 Xut kR([|Pc` &$˽e;VE̓pD"(YC Zc~,BLJ35)JL T ":qkUGPaRB]$1#dlDH!(6DUb|؀@VH'Ytg#MGhTf%ݢZMnU{9𺏂orAVmy6M="K `n ڪ6< 2'<6V]χ,L6j9Qe,^5 Bztqt*ih$ti#ll% f='\;-,F ݚ{)DzI(ΓDCkBm1&P.z~=>62#bpPaRDluHmk"* F m=EדNJdt  J`Q Q%fdi SRtLxE7m֣bb V$⤩MkurSp͍r;XIy/aèIP(EeQAR܌EEH,{adzw:H)-й'GJFdU8FтΨ19MAk.YqFw -jITAlRԞK=e2 b2Qb6#TK.dP?uN^y~5kW!*Z>@MtVkA֫ѫxdmQi`NZS0h¦f@ 2)/$6O34%0h*夵BSp*qKچkCWQ[1xx@A4DmM\4*wY. b!C1 PVR&T$8uS't\- 5uhD;S`]pBG&[h#˾z؋Wv+fo2v u3#cC>lT7;GQ!x]Av_oɽ_`ڛK,]n%vbU4vvIo-Z[ltz=a8m>_< ]a7]gX]qWɳopu~-5]i}q{y޳+KgCh?tq6nں\/6{HLzEN3%όg*pvVEpph9x'vh%v@b'; N v@b'; N v@b'; N v@b'; N v@b'; N v@b';  ݜ@Sq#h:x'PzNc<, 1,u \-)P[ ͇Vn֫?ЇylK|}u^bnpoiB-, zU}:l@w4R4¿[\*u]]./7_{t걯9rOm=TW7u-rwOmgUV֋Or~v2кEC}N\x8qp Drm?߽&Z&  mɭ@9]i7\]?_S\쨌PzҢ+?7}x+kOF![2f$7"\t͖8v;3~bM5n٣n=i/!H&H6A Mld$ &H6A Mld$ &H6A Mld$ &H6A Mld$ &H6A Mld$ &H6A Mldњ … Xj9$uq.&HUB P*>CSc-; N v@b'; N v@b'; N v@b'; N v@b'; N v@b'; N v@R$G`?npq)qN @`v@b'; N v@b'; N v@b'; N v@b'; N v@b'; N v@b'; z?{fɛ%m5Vo׷?]Z}Ya2.ѝ<3z1F}%3ٸt ƥ_ jFtgIw ]ZN0]]'1gDW ]\7[88 (ʫSpp{@[܆@[XgwjMm!jSf!g>x׿_\۷V3r2 [sjQٝqJ?˛Ŧb봬ya3.WnDǛe=E|l-6ݿH-VKOݲJ=Ooy~uZw.}FUN՘k5{hw ߽4l~6Ӌqx[߹jܰ7yW&._4w?зܲB?n=_\;OKBw3v떧ElȣKOlo6M>"mCWtmT-EkÌ3泱ps)K #,PY%(gCWklAZsK߄Z1ҕ7B5#`Mpl m8}P@i:F ޵6cb )CA2lcg2a[۲dw{9,],%_-uX"#I96 }0pUŃ ,`k}jW^}ؒIU5?ZcW}ےg}dJۋ' z|oCӬ{ƮpkՃ ^ \ \Uk_zክtp•49 bWҠPZpZo3=\ \)+\G ^ V+UW#\i[`UB \UkwV~+4kl 4LO8JI#.{|9&J,Orwկ?9YxZWÉmo\ۏe]pSm2d ]ʨu$g,!h%i# #J{,~ϰZ2+^rluAg\{]^ &KWt_aΖ{Y`3B}^ctwMN=퟇t]_&$OV+-pgKGQ hpz ʈ9mvKl/z"=z6U{֛͌|l_Cn`|$Ltu2N͕> ~ӳ = d3))r0ֽ³Z`u&R2֣QޣS%VnԺӍ3>RF_,<(>[Y{yt9n_h/Nan{`f(q%`fֈ#r4`GFϣ(߭-n&qڒŵ܏v+${ѼT ۊX\~5^׮z]29 ̀Wx\[Ԣe4~6\>]N+u[JF/ǕCm1onI^mwt{iT"\s?y7 9<'6}Bh0Vx騵xުqLwg+yj7ԡFl6tǂS:ALkFt=+ ;=b?]ttn*-~K4;Kؖt4;j&\ \o d+S ~{3 W*M!ME'[7r<9QR87:ܠ%`^S2wqU߾ro%G΍`fڑEvh={+[񷬳+[7|u2hƌ\1XB Xڈ.[UU.z "u4Ke1ĩngR$ρߖSq!r>jϖ(K~QFfօ%lolb<5֨&jw/ܦ+_3bT-$.iJL7s&&ͯU+YڈdS 1#z]%J $]d,ECdJPS_6Bi@RZ*O: b7$w 4;%{uǾҙ9 {x9JY[%,D":" ^z$!cl] >,AJ9>)L6k-/4KP9$ƓeE#l"ØKH(%) Jv7 .DotrgjFSg0wc`D4uVV/&>AkMYdY!lc{cJ++}mbn,3M9*?Oukgrga&r ތQǿGЭVg0|@ı"*&+UkU+M@{3 TY2Q'WO1g !Kb0M$ Mh4>@9*Yvyѳ.]7NJG21M@YMv(EP*Ab*X1(+r1 “EErZgYSA21v<ٙ9 GjOw2zYyUpVVp0FL)q_p>lYfϚiYߎn_(-bp*I|](ȃ1f䱽s9/ډ]n|fzfMnH>~rB>IA[7-J]ho;۞i ӲB /ƴnUƫcdъqAaT !"=ꇡvJMIgL$ F R`(dJrCEٔvTDWS8]L&7,ACHayx1qRf7Z C#uWYy>m9T~tE{=B<)k!B65k ֙`(9$ka=8݇igO67$`ZCɷX-y x4ZM  E͗ɧ+߁Vmt`IL&"lA&2 B^S"k KN(G *b5d+JPb* .J1m̜GƠ {(͎jmǛ5d`4&%SIޒϾiv:[ O*S "yܨb yQ6e(>)+S Nd=95hrtN`:F+svT8iCCҤQh\\??lx{vrkvOl{|5Au~Nmo/f`ThS}auwn67s7Žrz<]6-g%&+)J]̪l{gHJh 27xSVE-sQ},b.iJ0Q@٘yUPFyv qΰ+ʎPXXmvYaviZįnMǟGl Y0?cM\rĺAL)LQIf{xF'u ;*S'#CCMe]:Z+"'mITD;ql̜;GyMb jw:vEmգv`_SJ%kٕܠK W4k6&"KWahhQc]H hV ANcM&l I#c_1`Dr!t0~9Yqo4}WcWD#G{Cт$zgyY: i`" awJ+]EF,=`ͨY |IӲT9&dɂS)ک@ ~Ljؙ9f9#ȸ8mεOc]qvk1 ['iu*#9cJ%B) J[ɤZXS/xY]xc @jyM3:oGb|wG3am#7~TZYcHgx͆\ρǣf쪃0$VƅnJI!#&aUNԇAP0Hwaj ^AR1FD')Kp=K-#dq.թN6/jYffo &6cPzV-)",lBc^ݙ9 Đ>M{-樂ݯN1oнnԣdF[!g]GHtQ:i4wI9F0N%ts)}{Ag)Nj2 ӠUˍo=\{{ߘPf? 
<{=Ξ/unғJ%Th`Z=" "Զh=0bvtO)B%D m42'-L"%% "L|TQ=B~frz0DO73r=[Go y1S|L̦*edɐ1 ?o TҰwxk=58yc8xڱf0{ȒEX|"$8bv֙hEIK ~/Oo6ⳏ]J2Ike7 .@:"Yf:Nh7A z>ZtxXg :RV.:CKAc-!THŁ :e(UQ=#=Sq E PR'GzA>g12}3Ұ0| Fe<9?}]cWG1/uk̋oXQ~1,8'en1`vv\D']p9WU6X4\|(Z<>nZ:n梢Pr;N=݆wܡ M1w??{WFJ#_l|12vda70j"KIN__dIQKDYrғ).OO5om"XgkR ha/xMjj*Zɶuu ~=w TN%PQ3>IRk;m)&)[*lnG͠ joц^A}6}3X]FRPܫT5;*Q*Z7(i^pݯk5)ra]Fֽ7(L "vRvmfzE0av^nS=c[U"9工&6M-{aۑ 5kgוHUnvuoW^+JvRvU)VUvij)U-[c jn6!U dH0Hc> Zc>SWFZ%6\L?XӋ_WNq(23D|7D B!]n)K&'j[—`OMwL^>qX#QS+v(6lsz*IEjZjJmV )ʠn65jRɒt`4d*Yw`}|Ƴͽ>"~΃dF$S1VF}HsY^VH%>F@#Iu+=ˉB?)!C v.伤 »A!9Q.XfEk9f YH@,zI]*'azw*Dt'ʋ O~:e cPJ_S GQ,{Ih¼$$N`%gՖ Ό V</.jn8 >H@MUSl m.?t7Ox φy'Eќ/U~Y=yXk>-jR[~<ԋ4P/E*'²W@\fy at?z7}uy\4p?_^锋룕B"&K,*`zb085D6NFăti';H46\ 8)A2FN5Q )1C/s)i"0(W`JlB Oc*5(K)µ,n84M- %f $7-F~; ]tLM$+hHx$KZj4̪aPT`!($E֠K !:0$XyJn]V :|P$T_ڶ[YHwD@$TD2X ̰4N+ Mw)O3 8OBoB^aQi}"ؓu|Q a2)[2; ?F# v:RLݙL­?ݸ`exu4( -~6|a`>Uᡸ3"R^_M}*tqq>;dF0Pv*M.D.tM!%k=)FUAL/Ԕ.$YOT~^7f+00n뱥ql#p4>-ëW~lIΖ_mW3Ig3&Q> @OۼHVNvWͩxӓt:9Rłe4vI(FH9>Ũ>>_WJ?n]: ׯ~~:|?O]b._׫wo~O0`S8!o@!WMկ7ii6M˜n|v v>LY1{f[n5(}_zfR CPЦs],r=oR=I*.Y4R)Nuj Q}:xэs Wgh_7H6R~\ R!IQk0';'eH(åN(2P C#ALIbV+nYjLqI\V\ ˯*8 ggǙp&;!NgCr4I4&dI#(K2h)mFY; ZDl 0CJf XN7&6u;0ןso|J5B YG#AFmUs@U.Gڂ 2Fi<>('ޑ4-BdGQj8؟zP&u~ND\%q9p>jHhωgJ3GRY0U`B<U S띱6PL"^ˈiDk45[!-aj6q:DQEwrolsh#,mg)ay70PJ㠱{4JO)Cw%p,3={n] ~;wbL;Ae9nQiwZ8ީh"A*d42%c((mV0b*Ljd<)V2""&ZH'`pH#޳>ƕw燓gzS(݇rdVkgbjwKx"eqvv 3c#cJ#b*:ᗁ,Qryk^!C&YPWsMAude?e:-YolWI:WnM7޸~8ˑÆh"/d^"KJ,6>@-'T, 7U)̚3(8)>La,pQkT*ݤmmNr\v3Oںj>CqLq׫|ڒ;ٸ.u,ggŲk1C}bZL 6 0Zb w,]a1r ܇E( \s峕W_ ? GIUؓ'''f 5w V psv+-X QPzHCRT=!UT:)a,PreBځ.\Ms`)*11Km*CA ƙARLz,RO`'IDٳ6$!2D{E"j3{2(=f9%}r*}0sdqßS9~⨖dcn`}r16_e%ظ9HjNH\ikr #@RGTR_JmDtDRR}^4u:W}P}PծwI-(`0~f.ͶGZGBZn<Zx)+?-?a39n U`hi)IH[݃ u\7jQ PrH ilcJ1Ab'0 !,qFs {ށ,&GigSFN2lq^"[I\XG :Ґ`3h7r#Ω7\\[n4l]4Q!HKrwmIЃnяehwk 'T/q!f~ t`:=?ņ^j].dDJ)MhjfL.NRmUVKlXH$YoM!ibI%3hAD6=JoX)9JH"yl. 
HHɠڳ@vcE 1okm`'_P` .3 V8$JD)=%@z ?Xj˘gc v{Un%c1ipDL58$3L١GިTH%]4а0YNwkUq떵o v :;Pjkl&Zcpub{Tz._evu eվ F li٘$F1 Ÿ ǝQaN OUR'Q#~fdr5_ߝ-b0>.#4_._?}:~S5._cEbVt2AV p&+x9/%4~ Wz} }<AM7,ܼ|p:};7_dx5'&2#V`Fc{G_^oEjRL, q /SXwjHo?Y#hɪlH(iɱRaD&z`m&4՗JkQ0XjPt1Ƣ( E?9Z@gG͔9jKTzLy/]kgI_r!n,}!ժV;YPj4"IH2.+$N0 a}^7 e(qوʒsL$ "Y2rR&} (pOy }m(d7mmK|㕅9AOb~!<74뷏KQ"`x~6:3 4hUI!ZEP'ꄃ@*s&&[ J(:gȘT(b$l \f :=Q$r(@)cL M~sEe,)M'zFc,F}H0tϐW(g~ux8a_AET08 *X6eλRHgBeZ61~kۜ%$[@V2E$l%Z}]Pu3,e ]ݽP>;ˢ|>y)K܊J^^٘ɓڼ]#U׷yrV HkY\\2R'%kwyzwHY@CYj;[%e;ϛ}$ja R ffx[N'ԁ/9Drьeʑ201*Jo9;UzӅqƾP Q.\QTmvSqG6i|{`n>/&FF+2 |fW6zѦsd34J[4,k._`7N^@#Uc/2t)jjS;$@ vBQ&  @/rk45PvoܱG}DoZ$α(kK$_ lM%6]E 21G~Yu>b+ [Ȍ a+5y5ZGQALg}9ak/ Vx06}_F4=kD}ԈG~JqP񩐑 1'oY6,+TQIGڡ`Rh6&k 1&ިRL #'&@dKZk9Ğ5boPU'֋f!\KՋghz7q|JǤj H9gb=@omX2[HآJtX4S=۔GzPaoܱ>==[0 [~~tF.qҫ@tMl`b2 e [RQA, )/@= ċ^װM+K SGy}}-O"zӦb[K@o .gW)- qVި1V ;ѾQ#F`b8*/cAF,rdwɪ8rHZ$JDmU1HovTDլ6CQ^+©Q%ʨUar|]1SNG N32NjZ;Mp;6S",&Pێ6ĜWxjbV&L2酲D s6R;: %yMv:*~zvS5צy$Jc 5)js*u1- 'L-H eSQy*\8|~~^lE`E8x"A*Yh$;o-(}4tl8y0B Ch!>CCۈ,PԷe7L =>xMO th7A A[BdkzSam"cD<b$j^ՍzOr49Lm"8mͿʓb&^EEEgޡ =moc" MpZ}v9mpxA=excvǶ_h e ,v&!=l `5lr}צFY&WWmXd5PP|@y;cK)vUEڮq͎>]CZkHvVհ X!,8?֤wqՎP;xrqaPwMw69WBp{ˏIdRV#[jq-n?o!*$8tX5wϫg2@gAFS`[D^8=0FoG)fR /te0r)m43ͱ#K(wNqzQ{[;Sߥ"vCwΑp5luZ g7(.` -ҵη[|x=6&U72IHˢ k;{oɻ bĺLlR\Ż -ń;:\LJD>g_b;gvպ~x m{PaB1&_0E𵒲WNZwBgjOXNpVN4gso ~GmHCr Gy|1Դ|9@u%dJ{xSL BʒH0WdD!(EGၞ'7(cW;^!z*T jLyy~kSnp[%Ѿ?-kt.03']<僘Nrh7O\v׿è6$ϴ{0Eh/bbaEe}8.|mu_5`<MUV;sDL`;g2.Q:YgYh3'~-< =sx9mx8#Qͳ.k֮g V{Vw;-e\e#"~NR| 08e7,;/H{bK^v_bY[dnۍA]fXu5IQ?޿S[.iNcz ^t?闟߿?G.H+Οh.SEfS$&}n_~\ֺ5o~kia[O==+xGp-ےӸXr[_߮8ğN'dcmģZnWMbħps=BbONqvTV۹ϱ5!V n76 c{Jb+?&#r{{1^ +z=z a nnNaHl՞o]t8t<Ȥ%Cф&BRD3^U x\@asq懩1h r0W"%, nJmS{ dIG(@@YY:%$G Ĭ%#wR[.>Y|d%< }y1O}_?ճn,cϿx!B1.vȽ c0^l?OlX5):[Yp^2.UUS;d;vs'ڈim٪K-Y#8^$hAY56(jD sJ">:Sҫ12UUiB2a0.QN0)3CA^+:Y!3Vd8U&XJZeA60XR\>R*q3ˌw; BWWgO1[Dv܆^~|f[ؿs| [06ع筽fevǥg]\לcX;;{5Ui"]s ]%1Omo1PngW_yα]wۓk_ya<m]s~4!];.<.a#7Yz-ZSsS?h Msfuy}@ȑe{sesZ+3+ )/ G,о7?X.4O~4od7ڣ )翖W0q}?=HZ8rٟ.I/Z4$Y2Hճvg[:m8(TDv/$>:NCge˛ϝ:Oy1,؂.m\<}jZo`zƪgWN_=L'Sn0_Sq5-%:dtjE[`v@qp3/DOKI?7wwRJ!x? iE8ӊ)R!Ji_wh!4Y$рd8 Up\p/X _sH{'ayyăZ#1<ː sEb `>QM26f]1vm$6٥OWgخ! J%LI.׿ֹx;5 rk跫cӫ ?G_ gU]jKWXDʽ~q1Emvk;A٭Kir΍:WzElhtHse ,% !&8omTMРd$=zZtP=('4B$J)<2Ny)C6ȁ vx4N67H SBV6[so#G.ޞI/6!Ii6.=:z"t$9B'2eѮP2.Fj)3}XjexNůөs>}.}v6~f.ŶGZO@3yG<$e܁ kT!WeQK[EygKkwϞun5|Uz`+Co۞<LH~4~C19i,ӯigci1iah>gQ9@IrD욠kP"D$U?C#=_O0)J+e%xǓ t΄D qMDY#3:377IPh!:noz6lnzO Sߕ\ 7ӝWxbT, Չ5և@)lx,3krr1}㖋yw䘷 CH"5UMHNX1U"=-2#::' H22aFk⑆30 Z $^Pʾj+_N͒Rgʛ @c)XA:h!Y>h1NLγT 4AzM鎦hO&E0GyƂIIp'MIC*bQ]tȜ;*wX^\⋱:Q(FX+,4Lg0/~WNM)H&C DIq`%i&dP ₘyB)YPs \g@E"Ykc!#6L)q VP?0G̈́fkӞx<6-'I~SA'@=$4lʋs)/J*ź^^Ԫ//ˋa>S]~uauQr3s ShXLVe /w+OڭHOi(/JJ= vXPe2>IXړI+Y掅~ =h-$rKISǦy+Wme4)Gψ zb #O׮ X64Qzր ΛsT{JQBS f `Ɛ:23 BEN+dQsw60 w:H}=27.(Pa C52 SN''03G͞;F8O.ILIR)d:uKE38U3.&ϊO<&$FH%%cKEgO6EJ]><1ۖ`z~ݰU}z*[*C4.E\++%[ZE)ϣd`="҄WahxlmMF!R C[YrJVrAOhIo*-= |&#U2VgMRհJ5[Xmfj Ee[(z[xT[x}UA_Myr36zBF_N'/b3HIѿTDU(B艆zyٹ:i0g%)LyN22 lJm&ce.9j:jb(fWvqՖcCa%|&UhBgLlMD D]6igPƧTwrV@ !bQ&CT#( I`k=&n{ؒy=b;k͏C-"TEZ^XgLNmqć{ F b" &P7YΈ3q@r5a4 r3Ctdc$&-Ho8RE&EO@vq̔KjP+EboAG%5ʈF)'cFbHTLYc:%Ik q..vVCSX&l`&7tޮ;" SimIp}e? m} Hi|#v4Hى4Ȕb UjGF!72gɑ4 `$E6&Ќ6i媸Z1T$&maA"1l3Z'N "y+td8{AxK97r^τnt? 
n\<'R68xHF[d]Ɵ7JJi4rjT/y6pVAD"'f;F8y`dà`_G`qgsav 'p55}oyDJ"(b4_g -#UY재ѽFw/鞱P$rӽ!:C5gPU{8l8!O0dx}]xNF㦎4y`^_ &oz}7q}So/oꗭI#%gv)z.n)v[Y[uQə5'Ws/Li$8Κm>$gUQ|)E?";b *-)y\&Pe>&NB+7 ͵^;4WfZ3DPIx TZ+BڧAV=&=;.Dyc훻%ՀɼaNDq~Нlz>R\^SbF%jÿdr1>z,Z/Ip+R&%hZ $vWw/}}~9-]NpnWSAKʝR{(?͉r cĹגs.e䮻kG%t<dj7Jv]P#0>G~xdtyjj}&g?~ ;_`&p@Cdk~ގƴazh:C6g]-}(>L[_nv.ywōGh횀{Jz3_1*rpUܭ+'3sWP!|I3fx`㋥hњxFϊx R!IQk0EIp2|nCyy%b]@ߎ WYp"GYЀ H)aMĩh"쭂 sq8([UWFWO+-Yu)u/FٙR;/4HFFUt^o,Q{k^!Cm^/ʷa~CiZ7|66ްtGmsFƪ W7}x\gPnބ!p aC4r3/dn%9Hy tHő꒲/ U.C'X\{ c it*] ~d8nznƽ_]za vw(]\vQl|{+"|bSB(ft:eq2+e阢ݢdV슮]xדϪ|̣K="VM&B_!l啂4u^nzqWAᛕk;}q%Z.prEU]@ҏȭ毄5ce=O+7\Mœe2]OT:O7٫ӱ4U^4 *VΫT[z.T4cUO[#l4M=&Ga#FCs<ւcGj:,ިDbZAD%[TW3kf)>pfP;^ƒPjˏrr-c1s)D֛^\t;X $>$#3Hw.7chj!3RbIu'-$P]G^`oY+2Hڅv!]oۅv!]oۅv!]oۅv!] j1g]5wpj8]5NW Utpj8]5cW VtHUtpm#GEo7p|g7X,}lp d"y%bHZ-;=3duwU^j8}5N_ WyD;Bxvag?M0UZE@Vfa[p5,drV  ul],=z;4ktKukyy &w{ 92"8gU08 (#*`ӁۘlϞ&/F~aTaL.m}"ªY1c7E0 =p\~|9Oڕ XR|1M/stʶO?6=0-Bk|v7M>m*a5kZZai4bImJ{jI4|Tw#;3]U=r"Q{hnhK5p7*Ցedž;>ޟ^*>| wC[N)BAB1zmf*O2b"ΙkeRhY2{XxOXq ;YIdYu,h!\V 0m^2(a#!S-m7OJ? 꿿F8R>Ǎ( Ӗw85QU*s$VKJ8=d ;0}H߬dd+RY !c썴)sewBdUȲFCfxuA9L.uh Ц(xGdp9^r!`螥ɯPr>|}#jqyCl/%t&ʜ fDr0)*sTYa#i2N&' gTV AhkO TC4&Ġ:cQu,+_m1dFeRh"c`S 1iID ޡi)G/1n%c_P:qIfR뉟'  &浅Gm "% 2b!El>;QGDn}pz{UUx\2~p3c :ш"ex4h&*Bv[U6T$&@ 0fz;"$,Qj=oYs5 u9*˚uGZǴJCPhuXi~օ^ʕRLW!QZNVroUg~ t{?YYq6 Gт`^ʐm ɱd&xiXtU`NďLQV tDd9`CE~˖f."&هH/޺{v>ȭco=u!ܵ-ѳr[e뻞'_Gy|]uÌ 'OMY+Ir@UWTǸ3Bg!4=Is<.]0BR.$f;IM:qR(ê&F7ЋSr;$;(c> &:(!׎j%U o'ۀc]]4XK&VwVؕU5mHJ[PkJ? ^ypQc/o\@z?ɊNsVma]sYɇϮ,%@ҳJi-G4Qj!  Xr1@Bb@qt 2I"gډ,i2ױ9EAdjrexYzxfloղNC\xrVׄ O7lm马 nٷ?ju%.%fP M$,g=GxH Ƞ:Ռl1J.E$)rAD"1&k3)5cg쐩3[3]g EǺPlO2nY_WJǢND:_3<0l$oK -ZCI'E%}#dUFn5I=O TCQX4*RE 1pY(3Sfxs3rk0%y,Z3Wk^kCCa%Rs>q1$R9b͊0&"L#$@])u?*kc=\B&ː+B(%]CSXАH:ND0@w;#g>Ab(ƣ]ǾQue{{[^F&'r6PCN!51XMo"*k&S3ŒqJr $* 1 dV&@$$-rtB3rvp9Oi1uv%Eӱ^T^&YD:](_2j$H)( Lv@'Y!θc_}h;և{z[@]kz5u5Ilqb:v\H龄Ginoʨ,>w<12+ce}RYU^bGdFFL 呻AsG) 2eYpK"{+s<;dQdklXLI]Ԯj$M*abSȀd:|HdAg:;#gG7IX]N_QC3sCǮv{.E7]c?my(U8CY'LeLIY<]L̑N YΌ*Yd$qOJ0:s9}DOr=xC!<'ZF#= UFJglv1񞛱xے__{Cg: (c9l=|$dikD5^8d,8LRRT(k&h\#p:~̅s!?߫D6/ al|_d2 ;B`VtWҖ [ 0rƽb#(Ȝ/IХ1FIM"Q5&ɂ9n9FajV4U OyJG]kDąuJq1+]KZ8ǣNj'Touٙ!p#ϕg¦{AVRz;=3w w4+>1cj?QjkדI8t?Fˎ-Lb$S4e gwTZ^нc{wow^N!`!̎OT/8j81ƓV64l|yY{NF嶎XG0iwRNpKߜn~W_#kKҶuiN]qn%{mc-ns Gg`Izݯ-x0('i€§ֹ/B_ߖ8[yFK/{~+Q]/qkfRRٵwv*O.E(B̧K "/rA_;9pZi bn8NY燈M9$bdkn7) Ih c*6PԬ.;5MpO`qIj2B-vޒ3jwަbOlde-t6 X{m3t.2[b)_jw}܌?<?;Ouʛl>،1ȜHh80(IP$e y7;#sTų#?|~s܅F+5Pܸg 2$?` `I3Zs,`IެwF٧cIoiB"u?*HIcd*0*TZ(<'6HU,Ca̾hbd2 !' DR)x8SlR 1$-9$eЃ,Xfq'ʚ_`RоJ2O)~-rSKdonӛoW{krxƓwrA;8$%+]DK=u+-̾O9<s:ykroU=1;R[]AyÖi)5#Pn< ӴJĆ}77 (xje:;LEFf' γߢi!Ж(=ZPqFa?a?tE"H'4,#O#[E,C1K _XAݠHiJvx"e##eYiXZ9[%hX>emNO9v0 tq5gޯF[֋5e5-w6s0&?hY˕G{x }f:GDZ7"k{rZLWSo)Ã9w}ܞ|\= zćJy;LGkhCT?ϼO|wvaqe`v0^[X sOdüo5\hz4Wytf4:*j6;Ǘ7{Mo6O?9zë_P?~~xNzw<{2O {D0B['@_4W?ݽi[5-ܥi~vlR}v{gvWSn@ZtwS~<53|دxԅY=q\\Oq˯ج懽\W9k;wR.^>ބ.@\lڌ 7U^/n􏮢?#{> Z`*)D!#FWa+%jэRt#_"#=vai^{~ŀÎ'yJ$24;u v)@% *R9Za+Mg2iÚN5J2j6&L2>"hu|ڎr7=tM۹V i;O*vv/eEr:>>h.N_~hFu*ݱQI];Hfॹ.=iE${?Y{23BbWa JLq.y"I<|9IƬ!bmb Pt1 2I!R?r/tiLѺڥ(b;hn|Ɣ߉>$^Rꍠg}a}HWg]w 㟟)ճՌK_x|b\/yxgoj}1N%pKPGǿՂg't?qKpԿI a]/ /e)E/%x%l{=+Aϋ9z=Ok+Q@>w..6_;;y-^ųz۰,zѢ*\}b4^xl`gGit67_VSFވ] x4ΏNV}%+g=4p:CTowi,@2ҕ"VHJa=SPˌOqD68rO͹*cNٚ>Xb4F'&H6e U%oW$NQ78[:l+(_'DPi)OeVNGa;׀#~"2jM^+RQ*(H3GDXт!I ͐e pE&Y:Hh.`X BNvoL v(ϵ;\:}s VJj~Me$}%(8j oWj]>^uBKx OJY@>Z2tC χ]5(Հbb#屘y,=TJyRhADa"XA ixF*YJbk=*h[QP:)0  !,:)'t"lr &TL!5jĹT[.fn![=f彨Kx}t(%:9Бă#Sb+4ߒvڳt!4C Et痎g KW Ƃ }:u}&i ZFǧFg~Z \;Vzy3T@Y7P=sEmwZ(WYിn*r@l$'D9#>蔯3gE{os'Vӏӣ '^?۟6q|z7nzW|byV¨dx"_Xv ں4^\_Nz"NX`HKٿh_SEv …P]Ҫ. ?! 
a(S"0T6 E*% [TJ(T jb|-&VBᧆ\5KRq`tqcGfjBi54B3!"dem2JCY0NàMwvIw \./y@ƈQIXϒ.Gc@lYTk 2&vSY!kGDp8V k4:HmRDCrd&A+6hqei,Cᛎ1܇JH>A7X ZKt5`.&7F&۱ ekƸ5o$@w3ԸM!0D9]lhfxhX7v|Jobx;΁s f; ‡ѵó ޟ"QOty:^mu| Pqk.(:ct Tfy@uCaPg +dNlqYXW0l΢N,"$l2CMQ'DDHA&#uCcSE /R:M':8wL{d 2J;H%#PXn:JλR@JJ*N):6?>KQ -Ag+'d+WSK"nĹ]QI(fX9ON:>ߕfdˬ˨E>hɔTӺn^1yڗIrq;Uh ykYEijP@9Yj{%)#[l0YM͑>hSrnhPEo^fTIĦM|v&96 D W2׉-J-v:%sWvѱփcCu`O "IJα*k-|6au[4\kg D;nۣȐЏƝvk>b|{oŰ 馗 JKMXdΤ4(HP@0H?|3P0H09GV&L^DH4H ք!'t$J֌T^NjC`Q`d(냉`j|D b FрG!eLy9.aGo`>5]6wjڻp>7yUg4J^LyƑ :"%RpE҄t>zu,/Z`$ `1VY0x NG ǝQFGeT ;A Ut=ȹ[WY$[nƎil6/{?;Ρ\STf_,ȳ3 \ȕ\ *$v2Dpp a"_|, rIbB[.Erl a}z/3¸"3}OϳYQjZzF~Pou5m5m"J}dnv~ NF╈ZZɑ:!H>!BFeAtq՞(!.2 0*SNͅkk6+bx6BNy䵤"< r`ko YbG!0}Kp3"*okO ]KI DpC@,ˆ%N9w+z5 "AyMFj# ј` * Br+P X;:IN^r3B_ i"]{E]{v}{7P1=^B'+n } o=ics)X`9AJn选ԌaEVٍP\)FSN6Kv&Н$0 "CpNRQ&&!־&X$% K||22.ԠsSxX2\BObxi"C%%IPĚߤ-7#A'4Mgl_yg_хڤ jhpwW7aw#jt8*l\@?hT]KQ]֐&uHn[mv׷KWeһۚl&e[BodnyZ--w8]ɺ KY5 DN),+T.hJ{K|4v zYe7n~\2Wk]|p :p\9l_9/d̎I>Wk6P3'%ARae:x1͛"&aKeؽ{=wkcOi2 +N_> *ނzjnx!b!]Bo^vsuI1fX_dpesKO , 4핓1nlZ{LϏσ03;m /* V:iǥ /?&ewg[~Kvzb00k蛔=-P} Zu벳lb6T~GoiEd//U@e4Puke I7(ʊh&)-xY0mjdU|tUrդ`.Ų>N5OU:_\3#~˦X♊5ձÅwՊm.y6 z"$ J-T׺H4o ̈́n1쓊MT11BjMGMl޼m\|!2"ojg9ZlawHOÿdBscetG4ʢ'B*u$0G8w91' :j}gDKʝ rWԟD`D"`8ZrB֥\(ā.0v%lg.y]˹y<7v}&'#&DTuG*9Wgt !hn9SJ0sj=1Ԣ]!?ۉa?w*;w;TKғ S#dm EUHgɨ”|̙ r2]a&y$LViǬQFYJy>%,nKOvlvFΆ'tr7aI{mi3w{?ӆF$,hf̛$PT`!J"SXk% ڂIBʁ] ,ywcny'")9IE$M fEwZI%HD04&oh0sngIAc(!.Qk(b"tKܣM/{I_71duV Tx3;|f.aPG d_t8BgRP;4Ia.k~$À",`85!jr5a|Ny5DxbZ♱8}BOetcFT"e|ZzGӋ2ҕ[3$GnZ4{ih)C:qn ߀&O1յz 2D{U,x1_/f+91sy5T_Bp$+oċc`BceI֞_E[7Ik7rBC1,`ua1j&zrg?]38ۧ6lmSq-ӓn }~]jz6 0fImC xſo7FUc*\twPG^eŇ~Ë'0Q:p8a &#@}owiW]CMTM] %!{[|D~KgKn)@f>(KȀfH-׫&xdp%u]7 U$su7NT)Ny]R(r6 K#ܗwU_{HA}$&s R!IQk0eσu!B֝}l} =]o/0d'5g?͑i>"i/`"v`h%g1|nCyMH9ho>!z! !=!%- k,h0R H)ՄÚ SDƬ &i%܁QR{vƠV{lyzxI1l))Bdzld YiD \E55 9.h2t>*$o@|6wm[<%Usu6*H48o?ߵ3EfC:ބT>wX[l|ߜ0`4YjT \0 X>zyk\;+X/0c,A6/Q~t~cvJ]B:+ziXg6wɗ>&ijV#lT3ٸT2у7J}ܘAݛy~T0J/ B]1𵥖x!YGH(򹖜N)bxЄ̋l6V7bŨ9y,9BYcb>a_>}r桠>dlV:^~(e7^I9;8=|$`+c,^cgJȕ"Ӿ0`g;;kdL(+ o[%:,StDGSd5Ǫbofܕ ӾTU+i,ydR A9Ov&ˎB鞇8 q{HcgXۜTiM )j|& 7qJe^gQ+|hR`)1E74J&fZ>}N.%Չ)*NrgW3UxS9F<]7\lˁu+̴yS[+p}i 4?Tm̊d]d޼ŸsRD)][`+Ik+3g iM}d࿐^δ Kn] ٬.ټSewjz|ӸD!is-Wh8\=7uousp;'(1OVf @mIV@^5~fEM^*q\d oĴnk߆('jGaO8'oqZ[j8Ҷ[ǓEx!0 VX gx1G_F񫃉_4IE6u~=>vN.Z۶d_$Ϧ`ņ׾ o?zn]yqxqSл x^Ylzm3-q#/awCi GcBM܃ omTDɨh:sz.Krr}_| s|hc~x%c) zcl/4q6;=|02X&~_ʹ9VfKp8c9[}PKl{хMٕKig>8Q\ 0&U) 0kKZMž[J; 5W`+^ \!җWZA2vp•J*l/2ֵWJI;rKҮ2ޱ*WH.gUV}L\p%Sæ k'ޅՙ-bCGw&)zSmYzsQ x\ @wTf(Deops!ɼܕn f~Wׯtd:`eLVydsMuj&JɈQLz#%(C5PPb'>lT?:IWr_$R}f_Ë]8u.W S1K4̘zݿm{|_,لaǫM# 9`!zgi/u0 C;>$b~sR5u-u %V;R *o(eRCuZ;i&cY1?_R {E鋌DH*,&bQ67QV)YJE~)@EmXE H6L{I@x͸ƠeZ j1r *|5P-EV}([O,;(?+oxx&Я(*eBp6rfJpVa4p"#)e(EԈȨN1&G \eU-Yt 4^T J#c1r#c9]mBaNp!2kKg7̄R_aܯO~۠rK=>1bSO, &`4TAgր!:4CI -e {U'=BCMtP+fNhcU)@"A[Y~8s&澠v1yڝx47,jJL N Yd@'6& DxUpј@0Aخ ^ABx ё&K-ci19 :u[3hrؤ6իwVA)QbGop>]M{VE%j!Vr|ڶ+e=<6Og' 4H6Z34'bhx^\D BGq,r ZGQ= Q$pie2(PR P^ճI܌UtҒk/Vs m\nWt3YIJ=3:=\^F}NR=dvRA&r OAO}9/wsb9AE՞Rýt2f,Ih41p#w5=Es,J߁`*3$eҠF~Oۭv]5D\Gg->vyK /ӹ QS:o]YPQ(dY IP'9TC B w3C$) nC$Z '$DNQ,%xS$"XАHka֚3əyg J+Fz`;7GKGzU&N׋j [bu7YڗSW+njp~W`NO>C>xՄIq $&*kaIv<뀧 GAR{CjLlD_|;"9!x&(9ʣ2$%OKA|V83#Njz"4,&&g)!P&jW$E|wN9 ºZ,MCV ew ?BR ǣpp2a?~ 96h޻'7Aj..f] HԒj-rzV '7if/_M{qܻћnx{rL!j*}g60I~8Jy槔Eh|}p] 4-|~7|id>x2쟅_ǧoN!ɥN%~1hb[id'>'NJ, uaLۓ+~czoJ_݌4 `4y.>`5deNrOQ21vФ^^x._3[aهҷ,΃;}@$3?ްS~|yO[4~#^MC;)ԋz. 
in~BB++w y/l`V^Xಸ[u݁z$xyyvlfx'"~ԭ+&F:1 sr>vErޱ٠^bVlMIi%4iumf;1Ǐ-6V6z@S|2`ŃydxDVtޘm1^x69m( '%h~:S$VגAPBfw^2zϢTh+v<]j H$V2MPJVKI!>t2t;e@/s`HƗboRe½Ǫ}N@Woe( wI]U5em"SŏU{~IH<Ɠ(+}d",Ivy6>}=(%.3SԒs[)N`F I6 :qX\23Z @\4u18q{c89"0i5.n>jKS̥ + M.gf{eni)9 .)8uѻ,ˇp9Zb|^Xzt̺f@h0Vd_sE-ARlZz=85}^g R:~QM7Ko-MYwb`ZZ6L Dmp"+dFSQ"|~,TZ *m>7Ws F@grQ e7JP4X29#3HJұ}C:niAQ(ApqZ .eNpT>$iuTX]E?ZT$8cD ͅE9N"4C#+}`s6q|agY"R-/ErLő4Ȝa?~*,>XυucƦ8D7m{Vk1fr|7-1<Ǯ[eSHx>Iy*,9Y $LivVLkD{e샳E`b0r=̥2Ƌrm]H6 __C| oaD$+{R'WtY aR"a8*'XI7BOo\Le$h}$zT4C#chRӐb|Qi?mT5 eca8s9pqꏗxog߼|X@( !~mj|nukXe-x~ ns5~>Ly2;ps[0_σK7x$G˭xW:>W`e64s&Q<*-K_!K%E-/kht$W'#a|\J#$ 3j G2oDMJFX^7 tƆ ECǃdQ"d4(F(B R  Gla;KNa;+`Us(z?a;YAK<ɂ# X˩TyݧVW$+.2Ǹ1 ,kE嗞.KzrP"xT 䀆kGb[tk3a@9B(ƭMr4I4,t"S.Mh٧!ڧB^eqc*h%FgqoR0#4'(x\kg(p+G%p~Ʌ,E-<^oQ ȅdB^q#r+rw*9^n})hh=ħ`gw_N_8*~>;*+ώR f:UaGhrۀ;9{#דp;bV%Fv%:V(F&^2 x)Y! g@fG%>HReSl^ -=;"ϗN9 ]\oIZW//I|远jË JӜ+s<7<\;Iދ7w\FӪqj Ir)W.zIuNy&(#8sFT$z4FUZ`B<U4~o;cm50{-#chnخֆ0<AIZ8'7ŵ`:)7)eik5I8-OjmбO)k"k"X *1j 3ꄷ۠''S&J on[h{0* A ~`> aaLYaZs@eWE.;kHE>ג)E 0{f4LK{nTdZe9DئTX cJ#68R$#Z{1[,} *P.ם,[UynqK /|qې|Q< +x/pP:β߆E.9n˽y;8}Jc_UQgs 5>~6%E<' sQRw,;Hj(!Ձ AB/ٝM9YiiOFz"ٹ),[JRD$=J{h=g4Bv"Z8GxFL ilcCCb' /828GAVp&NF:ɌmEO|K+n_o#|/TGcc F !Y ֌Jn:%%0Gq(Uϼ0 uwqvnW/,ISAw4o0Ĺޛ~1]Jݫ>˜aŇDYL6tny$XLjAõ:`G6A&*8ʝ7\ 3f4)c5x:FDz4A$#hGG`QQ/VFCK8Aǚa]dϖm5=;S/ Xz(yh z4J<O0 kl'5T[(8.s%ʕS5 @%uK|>K=d{t%IwdYT>8/JFPi5*!mDc b"xUWCQ{9$LôHML0*8v)&8+hm8]mbZW48rEl*Y"',b")\V0 ME^zC =tSvg"G dekU@J`tF; u~SB u*VQ,hdTaXK"2yU )d4&jR\XF(櫂0c$d@GŢO!`Ih6R71`F%K $zMKQ]E~מtFWXc ^H,ӹ2t0{7 Es.;*OhKpp Wɂ(JOK  PQ@Ly;y~;JZrAljya¨wlUB&ޖ*_%t/"LQtФKY>~%d8E(]%Ki;[%,/YWe@g>X;4Q28{Q:Mz;po;|76q<ވš<}o̩ hA^ٯzA3yy.z&hu+>hshro>_0"ɚ72w"SN3?Ƒ%44Y"SAFqF;q9x,DK|ڌ;"m twl !")UC2#m,7.^tt۽RzAΕeNF]6rQix]S\1sɭ`^L(^(8`JaVKEF1G)ӅU,VZi{:ۊ:pUն{%ZܥMH3Oxvdx_V8ΐ8C( P/^6¢)@ #)ȗS0OI*؞Ma= ;w%jwM{w7E` cڡ $mބM:HQ@xs]-|hyDj{Og('\BW5D{L|OWV|#{-e˺}:qͣ4FBZ ,56O"6t)ayФ"vj II-#q ĩ(>۫?sӤvE E>aΤ>ń t-c7CM{DrhI?e2)4 Xc MUBaB( MO$PnZ \[0#|v-x /U9ٻ8r+ #@|@&xG|$yL{5m[%uKeu]3u^\{-V/H1$)?]_n/K5|Nx:#[/HkCHԝ-n7+;"iڤMPrzBmom,!0<U+5liow>汣? 'cZ˧+Ehָ;糛?=>y#<&oU-xIyc#״K"kȾ$}8 Flt2ӹΟg9y%aszAAEf w0Lŝ&tw&(^ 3'oGz9RЕvOW@y}C{5]px~F] ڸs2Ltu:tbjHG}qn \?Q_6 (V3xt2DW8 pңЕjt%(ͪ~Bz.Ь]8y0AՁh5Di VڕzJ8t%ph3ẗ́JW'HWG@t%+(t%h/e+] ]^<`j~u%hPzZ)ҕdvV - k0PHE14;0ǑD%דKgiAhediVvӱcvYuwYM4({c !Zraj-ҫZ+h[zZ{E]8{:= ] OeԼ ҕ7+8L:F+A{ (Ӥem ` EWWs{LڰtnhӅRb<:]XzlBkԑ0 RtE+]=7:3]*䇡+kx߻eN L<] 0\(t%hՕtqShv@{7Oķ=>;_;w^ӏEo"jwJwlXξe7?=9I;{Hv̳S}^Sf}/OtrLJO!w)3_4e_)fw'$zn &F>&5vZMP.)UVdxpwZEK+AI.S1::- @v^[NKm̋O:av]3̡?EwЮ).wX>:$!)8{eqgWX]?A;Iw4i=S8a p 3$h[a(v8JWCWH=5ԭ:i]ơ+K+Asvӡ~ `cy@mX:AWuutRa$un~56XRӺ$ǡ+g2RNWǞ: nǥuCI {nS@Wz熞 [+fRЕe3 ] ZNWrisW+] ]i5߼; mK+K3+] ]v;z&BW-^] JV:AI] 0\(t%hJPu$k=3/t6nV_ ̅N\tiWa]t2T$ ~v8a4h AkLPu5c#?CWl8H0΂6 (+] ]y I0t%pG+AkJPڸ UHDW؛aJa>Zit%(jO#V#=,)ww~ _NWR~2{\(giKQձ*> CG: [ܕ9JW =WY?]=QŇ4 ] ZkNWJW'HWړv4]+(tXf0zUWHWƳ&=] ਆ+54vfPP:^ʚHsWa p㱏*> K+A(+]}scW| Rl{g|wy9ϻ w^}L巻 ~׻n~/=?Ѵ7 ?7WIfm~P7W5O] |IW%ݦ|jDrz/moC5BLt2im,pw[xJSywħ=>9app2_~CfH߿Cn_|gaٶ5gwHm^/#+vUPKb\HBx|Y-pBx^蛇]cV'3ux POw?˿%tkUxP-ߒf6ǖ =d`jb::rl݅DP2ΗOy?oA +snt+}9ߞ<u}Cm|k=QQ*Rw5zm1lv))X{⨕1;z燚}9eKUc.TQ*6͚}ʹRVݫRyE(Lu66h$tÝ oqV[W "b#=[SW|&9YtDkc us֐:*dr-F$ki4FJMME&rzH\ꝹԬ MmF65zH-tOKOA1Ʈ#d34f"{Β-b/9%|N~Od4!m^.`ouT=Ĭ\ɣ/ zfۗtEv(4*0ߧO@.\Q&ރ%vwh~B!V! 
Auᨢ}&iOo/Si_6YR!]ؠ8 똑,sbrf >Xsqޜ@2 yrNTR랙sDVĠs )'/Tc쳦;z7E"r'QjƦ"8: FOk1MB@zQi md# V\j() AZUc=Eb3l 7W(ws5j'b<#3k],>*CUW)6QO )ܔ 3HT'*Ht=KF Ѩ ٞw_KQ#Y*yˌBiE EA)&W 5h NAkah:~աiӊ#Q?k(Qln첫ҕA[ <*B8քDWX čd Dq:5CWg\/ xEأ(͛հ֞1*T.ȀJ`_;wMBA[ELzҥLcs'.Q!]52,+5&l2c!]Mh(pu R, |A\J}S88 TBłb2 L f^EoZPB]0a@Lբ6CAwVrE` 2nPư)0urҜ`e/n ̄A[rvg- eJ5gAx`1f#nnQ!67)Y_J'Ā:c2AA[:* pqbVZ h޶̬%JCH=2j$XU(l @PSaH6qW.{$u1j{VQR@>R]zF3y|BBmyC( T&%"#q`l,@5E@H V|W(:мGwXZIB>384(2Aw3RUeƬۈd1&PTyUDa8"_} k}gm>il?{{UЍ#߳e!0|txḰK<~r2+6{H\!Z*#d`SayCs ~,{/;+XX茼p4E"LԴjQyu(}ƆmkHtqѼ< = %$0AɣN1xQ oc`JrAkK-"W1X5=xi&k)$QȝL#S&w{/~3di*P:c+tme,!{4%#OP)_}CGlGM_I 0ePEzhMV %h-v6%)%ڱ"-P|tex jw "Ϫ90"{t$--A#@HD&e#kvHq%sOEHYIb&K((ͮBUqwqpDֱpjz,ʰ2ucF:P#Y+([j^ |MG̐ڳ&&)h&PYI"3 j;zrOoV 2$}AO3T [Lɍ!64X8ym\tyw H۴7>{uL j`ҍ;k 4zR6\E`MTɌ2ЭhךB$K9OՓFCoo1F< =; oUfĞtd E!9 sE5* v#bsfA*WRc wSPzS!%fTii mPeya6v v+FqdkcdᓛknY(N_aU,w([T1"@e(1#OW=us[sO6UʺT8FՂМs;c FnVvZHǚAF %_&Ld5d, (HR 9'еt:CA^Q˔?jo\A+w6U\Y36(=< VAP8U9km#WEmwsg|X l %_IvYߪmNJ-KCxXŮӄphY E! i>%7L0C AcO 2GA$%T@gI-n%8ˆW`PA \4x=Z7˥> D``>U fNJ;YޤUJpD$p۩ B,uI. , \`tƷ+ ,;uUr0UGU&Bӻ\j:],S0E $Ma@q|#BKTpx2lٍy|ԍ_^mwE ^O.2n0Mf>pJNq<ƌN/*??(bt,.Y12/z>)NG pCo։ eë:!"OuiSÄ`mL57A|S")HE (R@P")HE (R@P")HE (R@P")HE (R@P")HE (R@P")HE (R@} & C yP'2vػo}0\M?66x=9c?o& j AK|u7m_݌3UYFirꚰ<\ګ!|Q|?)o.JdP`3EU׭ |:. Qi]'|i߿z *ئb|fb#5$dArt} VXM #HeS7o#`0&fq FUܫ;!|ܧ;^N֘Ŋ5ӉqO`( bLW^lZx=z3Ǧgq ܅3T+c -C}GsZ}z6hI`$&0 IL`$&0 IL`$&0 IL`$&0 IL`$&0 IL`$& J #\i"FFv]l$>C%JR@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)NW >}R)%C@ ('%)*R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)NG yj}O*^`)ֺI/UB'ީ<_fCbFre#\FFd}ӝ.ֶ[$\K]!`zCW׉ te] ]Alc]` nI^:]!JmNv7tp ]yt޻Bbqt\6pYBZu]+Dɻ~J?({t Fnpj'Nn(]J]Ǭ67tp ]!ZNW]"]IlJwhM QZCtutP`N+M_ :uB!] ]i˖T?VY4e;x2evVohX r5O ޾lX^59w&Ā G3p3Bz#M`U"\BӈvޫDM Mnab4nx7>Id?o& jhp "0NP_݉p<'bc颤K6 ~?Zbv=w0 {G|ʬbQ$ᨃk~/Gk%X7J.2ԊmXUΈTUE2xf͒2ˬ/-}Zp̵&$ϵRjkOpuSukd+۞;NWVjuB-yЕ熫>d' ]!\BWt>QZZp>E ' C7GUh2P2zstG ~`p7GGzʮ'2;Е!zi ;BtDW'HWw7EWƻBwl=NFGWپu\tDW'HWZjot zCWW5t(fЕ3R ΜKIQ_oۯ}u3_l,G ̓}j˼6ϗ>vj1dN6 nv]3x`%e߫۩_E so,VB͙Rb:ː!:'ۃpnޣ Dg7Dg\ʮGg^՝dtf|־pp+ ]!ZͻNWP")ҕSk {)zCWWƻBVuS+8h]AYp ]A;OW U[#AGpt]-<]!ʮm.Lt]gv=8y`p'^[ckwCkc݁,Kh#ա/th:]!N!ѕ4[B7th]+D) ҕ2V>yWXU/thfp7^] ]i˹=+|jћ+Dl Q+g]k>휈O 1L|i*{~ 7J Z3'Ml[&I6hx ax@7Oi :\^6Cֻ1zHǽ{޽^D@(7?s ,)q93|_U81m47f+OkӞ/ 'P`,y%MN=዗_xߛ?ZϧM1&I57*\so=0`{܌Ǐ曋Қfi pfw}qsmzƁHF jqoi}Y;l8o)Om}UJVudSKAו-%9kt-Ȓ'qN򕀈~бm8'b)BJvkZU>$]E} SgLp 4 x+XHHXJږ\'9W) p꺲u)2ƋRR)l[ ؚByp,.zm*<$|T҅J^zWX|>, `@Bq5npcLȭ+2uGYE%dQD4 F[ ,˹1֡16+iz[f]ﱶ.j 4u&?s~/P}{֋rq r-}E?g[y[0 Z# u>s]gz;i< 9/_<^_}գy:(0Bc51 ڀ:Si09$1EIy+Kc׽|xUfyqo_DZZJ oo)9R| _ /v@9R]*nX_P>L0I9g6u A Yc'U:pa! ީ:Za֒?`yKsҽ{N:x3t,_=c'WHBzW1]H]?wX6qiq{/kYebؗfQOܪ x_FuwCP~6W^['A|JdAj\qIΥuqm0.KBC|贩u%B+%zU&cy8R-VZrU~sq\N,)Jl!ކJH*/|vQI^;\۠\y'0sPͧReX4*Y)5KJ\KR23M 6ĝ@RnfQ ||~{]8;MA+~_i]Oӹ QS:o]YPQ3Q BppBu\˄B*kl+^1R97~Mmd>9.pGpQ.vׯŶ xcnʗH;jdWpIby8n5tzQ-RﯥnFZrJpmܼy[W2|\,ܳ(iEy}Í^\5~'{,YhnVKfqGpɍm$+U`~'$=|6QvhSWؑ.6WZxըؒ??yaȒ$#`9RmD7qWuE:m4sO{_)퐻q2xy˙[<^4._'i٦:CU?2MLLZIXa[(ZY{f>7<ޝ 51l$!fpܞ1AV7*ΰM.{M.jAQC \Bf^2zǢTh+v7x?: %AB\Nt$bpg&7(H C|@dw(́!jM:MƆ}t%t0q].@od( WI}g*`y[͏"EH<Ɠ(+}dZ[=fn03|/77XPI<]$^gSyN`F I6 :qbh ƨBXUrV@\VH  PHB ;\ո-Mђ( b2YoMw.gE>Ȕ CK |lvUZ^n7:D^;o%n5;AJg謺̀2\`Ș_sEZ$ 6F -f=ʞfh>Fd R:~QvSeyX L+`ZKІHhCX!5R.hn6 w"fa 7Ws 뭢p+4(*nF!4X29#3HJҳ}Czn61Z|t0lRtBrJ2 +vMhheIp$J.8z! \\' H! tRYIU\cR'Twjv@lg^._y8ӗ~6ʞC%?[WA+墾Ɵpkaoz38qO ~KhxKd_ mSЁ?8,_qN0Vb4C0J(ES^+)ԏ9*}jy}EI'.`B; D\˄\qކ2t F4-]i=f`~: #D Ȍ~U+ꭩ}b%] ׉\>`v). 
}<ظeyvEʏͅgGb4vE̩4bn5`<|6oΩ9B[M-ɵ#9׏t5 FP9:A04 g0b^'y;Ř| =lɮQJ.dvhe$,}~B$ 8t0Y3NҩBEg 'ȎAu|ݿ_݋oQf}͏/p B>A؃Iape|)/54͍к]/'2Z;ƽ*>Bո1; % Ώ?}z1!G{͎GhX5"f8b$tQ vX².Se$pbqހWz4/ Axl/~پjm;HqU߸D)J |;*L,yuѩ1Ny;lBTo#=UyU#=aǃ4x  SqnMJG")2`I"Y.)h,&%FtTQ1gt K=dggvss5yMqveB YUB$P ٱkN|nz@lWxI7CN{FuB"*1JB#ʜ 0*y\$9W ^6xv`@)m$Ytz @szemqUT@7"v΀$(σ#BLQ,7J*oKgblNΕHOFbU@-,sERR((n' dm9C%%<`WΞAhv')As9H\R$qiTm I!]BPY ¿\BjKL'yF_15W⎻,-1L9F$sCiwyN+G@'^z?H2 ['>$^:q^ Fb Y;F nDQ$å :$Iu&AUBr@=1~bkǴ҄%s3vF yK[o|Y1fK䕯u0 $_A'6E>}b>K$B;Ĕ]brqzܨ)<4@B$UTRЁУouCfP ɼ:&`F&T)$ CZǾ>GχO0ⴺ_/˫S{iGJhwHKُe?/W5d_VcmweQI]RڛFDJJSU8+QWcw!%5{o*NyS6U#_x:0Z^܎tbRuFP< _V3ZPik]@}ð?ߜfMޜH1_CqN/Oir'qNgǟ8+zQkx88j{=Qv'1hɌ@"otasϳ*ζ?mdZwW_X:o%Hw7IVy{X^mm] ]Z7mw[Kߴ7&O|6NV 3W׋r%)}?j?\I` m<mmyl㻧9[~gmrq^<~4ҖmI};{;_+;칳a1?ͻ-.m~q_gUNa,[8R?21int -m}sSW7{<o#t+q[\}|~3ͯ[]^:WO9F. uGnF_,v[R:P n6ݸlvyHtABLUf( Vjt E"gTCUrEm#1lr=k{a: kmI٣!l]5νyI^CUlWnK]i]WLieu"[ab+jmjNϨ!Я׳*d>dorll:/o˳>?WʓC :0TE)zS~i=)~tTS*GG)H~\kӞ_f%;y5Y~<N Z7zNoN0<%TM27U JCjtSfRP{ZMxYﬖ|Vg h^ [CdtFL9:H:40k w_u@FCLQ {MBFX"()JQNQtZG+}4tŴ.LF+' .]1wEJDsi1Sjt5B]y)}^ 2-uŔ^܎UNp<^fU?ڡVG]rIWV@G+v ejJ@*oEDģ+U`bJ'F+ HW l\4b\cz)BSLHieD"`j1:W`%A|:2?~r xC`ķ{))"z-LkRTq*S.LxdCնղ,2ʼ*+\T&#z>9(;8L+Ô1>kF+v>]фDL .t]1Jƨ+ԇIW y>F^B,b𳏘Rut.`qEWD!#Dt5B]y&"]1GWL,"ZL )|9;V?J;|nw/`醞.5uehCQ|]}^*("KJ|bJ%F+LATb`OqEWDB+6jB奖銀f۱tŸZĢ+u2t]% HbqEWLA&]QW4* EU)OZvŸ^n^Ӥ3d#ZOxoX1L)U,ӧylD j3f*ZzSkоh DDMtƸ1QGgLiuF턊)s Wh1mLF+kKW bvWj TWDQ%]PWq銀wV6]jF^^&]QWxb  X+FWI3gZ+t)o nUTX0v][8ͼ'^'fFxbw]MڧcDրvpQXtE U"J-ƨ+]O`qEWxSO!JX{>z]h^WL\u󝉨#5Uf h CbY1h4M(U2-iTW9FMksczw\JQ'իZѹ!,^i[~o7uur[mLf+.o i* J#[SJk΀.55*DQ4և7&ZoO'*ܖI{?]-[_M.ٟǿ4͔.ټif6 mޛm[e,d\۶Dof-w((|eWg*򌎯d]O4ɯwWv_q@;R<~ ֏c\t z7)NݼvG)dDb`D2(AؤzmHW y4Knb{҂]WLdu&`wGMh(Hz9;V<>0ȡuVЏV 4,=t%z)ZF+uG]WRh2ja#6FE+Ƶ6]1^WDiKPx~~ l4b\iu S QWJJ1 1]) BD+uŔƨ+q!ONJQn-̬ʪ=K 3_6求S0L##\_;5ʆ Wi:GϒFj4^k$):#\Dg "i1Ii]uE8Rf=qEWLkĔIWѕLLb`ϣ:EM"uŔF+$`x6M,b3 ҦueT"` 2]1mLRXp*@6ꇋGkޏ2 jߪc銀AhtŸVǢ+} S$]PW<Õ퉫e,jcA Qu_W "]1-@bIWѕBHWlFWkEWL>t]1IWcԕ]K10Ty:6A|Ѭ ;?ie:JOYGEELXhc0ա12RSYF }yϴ*)I;b`w>uŔ&=rZtpX"\#y>ƴ&x]1KU0HJģ+V,ɔ)|AOV ?x/` CgUԏ teh++Lڷ6ҋtEO>uŸĢ+*t]1IW#h0&]1zg}G >t]1JpqD"`']4b\iubJt5F])cBX GW C'Տuud7NuiN;[=4erjLD8-ʴS+jJidH4E|OfM9 fվ^mӪT؋h1茁u<h3uztƔާlљ؈tE w+Ƶ*]1uE(Lue-jk"YӚ1e5 TD"`:]1&i ]WDiLD# "]1wEVFŴR1''6 (חS+m&m^M~=VR>̯ʳ'|QߦFmE_TVܲ_{+zuwtچmVtϯż̯⢾-DyyNOv /|Ae5Qlm J^]e^_}yaovsŎ|}s⾓uuwO>>[ΔVܓUi& 5MM_ۏqUuN{ʶo,:F -0̆CBد86gC2ߩ ֿοp:2Q"#l~ῖg9h3-|r+sDЮ%uޫ\ i YKSo^-jBjz]Ηtq?]Y~૿Q1|V՟ȒNK2_~00eA~ڄ(R )uE](.Y$̗"2,Ԍ4C<Ҭ9wP=0u/w>?2L5JsΔnĪyc֝/3J?QZl_d~?4(tqLQ&cmcDȍ0Ŗi*JGo8s"c51PhM%W{ddbdq=Inv*J5j"Z mr9;~h!WޜCR-Ҝ޷^ qzCd"w;dBI9A\13gIC>>>ŌiB5CcNBظ윽B1qfFʳRL=# | ̙Gf? 
L=]"=6.ʭcPM:-j*0CRm`+}g3>L5iL) D#n@#>̟IگqC6)6p \Q,k!ʹwL|cXFysɪjͨ13m3]NfjZ>6XRNka'F02jPKٷ 8:E/kk]f km~M&VCK ;З ɍ+7=Bl:/z@nվ\k.$jCf0L,ٺhbύ Vn)YcUdB` `Ց%CӢXtGj/`gݥW 3)P4AZe6ykX[ ɞBEEG|c0ZX ÅoMhƲHr6.Z J =TjՇ %>,dp,C^q9 YWeQ\Zj)uzd;;Ƴ0npѡ InaMiݚ 膽Z$):TmZOt^FGQ\f-sk'&ߦθrTb0VrOCn@6'0 84؄ W25G)מꇡBU(M LCd,ܺލܡ =*uFoTzJՎd4Vcf#*, )v)/nލbE]n1ќ48jG4M8!ce_m@|7i}y}*i]| ީBoXXѻ L0c"h2X + b|PT8xiLdm.g%Krkk%.2C6a1'4ū`Z hduF !:N(ty,GѺ< {K*`KU[ Yԭk2h w3h8*YȩՏò?oɎZźf퀛1|h"1J_k3dǗ _Fdj4KVmSе2"Y걇t\Xj@.B^s` ѨLl9wKbib#n _I aʠv?a-%|m9Gյbhǎ@HA ף;Pj^v4s&ydqB"rR:Wys..ETJ30fDi -H?V BU6ӠDqPk|?$gHWY MSLkdg g֊oTʽ$/޺ཚ"`!-3jw$a7.0pp_\r$];0^Z@2؈yp!|;~>/geګӋqǹAI| #xB+fF(Eq} w1zY: kiVV` e0ڽA_Y_/φϠoZUfYl% <%:aBr8K~A؍:zP{V]r :T2,0URmK{P <X׷n3셍C`?V&sBh;yUOPj *W([12@:=|xƀᗻ^ :H5-0gǺ.T:cUx hN.Vn5 ZAA %!LFb2⌯(\H5'е%G랃JOSE.={pWQP k /51z7&a2kA3_,V M F;`Zp(LzOQs%p8lC}`;f-B C=Ap&/>#W=Km\U'N,P˩ՆXM:IRËH*8ĕerk7 \oХ5!.t B#R0}`AAxK6B>/ŭ7[{B_{|@CЌ=HF.Pœ{S!xOGB'нݯ߂6G~s9Co߼8᛫q_2!K^Cn0wgwhvqwѐz'۽𭜝M<wbfRG˛O/o=Nvsquwɋds?tM^{Tś\;]]|{'_'u!{_ ~ v{zs9;1zw*{;Mw5ФP.p } \=U"4p+ \iJW4p+ \iJW4p+ \iJW4p+ \iJW4p+ \iJW4p+ \iJW_4p aK+WArz64p+ \iJW4p+ \iJW4p+ \iJW4p+ \iJW4p+ \iJW4p+ \iJW \ָ = pvp%9 \aW+ct+ \iJW4p+ \iJW4p+ \iJW4p+ \iJW4p+ \iJW4p+ \iJW4pՃ XM0DݳeG!0gns ѯn &-|aҷ͇= m&09W>c͇}ԓ;+kѕ ~+t%hc:v))]=Alm ] \[+AӕL"]9) ѕlkq3fhӕtt銉 ѕQW7o m+ASz_.gvo2m)e6jd{WBWAnJ'ޝv?]^\߼c409IN; 'Μ?wه^>=;9Tza齾R|?G}\: `^8K{뻋Vx~qu}ڮ^^i+Lk{/)ɫ =8w|j&gUWoϛ ^ׯwv{ v]Az&ݳܣV~aLyRŏ,ϧkIߙ!!v7 ܴD%tF%0[+s5_nA+AJP=*±-]`Zt%pm ] /}1t%(^tuTnp4ܼ (ѫOulikflmc+A'Nn'[8}k30IEBk ]= =sWt啮>wxCt%c ] ܔBW@ ;] J JWODW8lG] BW@k9vl ]9g.ҕzщyRP^&Y <v^s}jEl nuIU5iYUbQ?}}=pP A?Y};g9ogu)c{fթrϴ,𼌵-dEx鬔_x= pĦ$uMoK7,^ |kBl/oϗ=᣿܂,ab<.zWkTK!f-pcwhUzxlҩ̞3U\+w{[ `PD>+ø72pL%!:2DEp6h)Q)Fؼh>0k)An,<ܚ9o{ejg(i0Lш~ۣpc^-WXeLebz<3&1@[?r}W֦?GFjiEo3mԸRg*ѯ̎"$hVٍjsѯ%o.vU/sW],39v%xXwr1j_MW~]}op 7i2{HyI"}[5uoMWT}sչk1 gW}ߛ P~QzlrX BXݜ.ByR"~ftQUi2}J?ڷZ3?2-d3ܮa2H7~ME_ջ@}| k6g7P5E&K}f 1h_ߞ@+mQ\tnjR0)5/fcOx I E¥X0{Y|3kd@òo_c~f[4ݹQ[LX[H$D%K3g5F6|BdPZ&cB3 2GCNy)xGƲ$S'R@K Dsͼ EzaQ2JrFc:9=1BI/h#eܳ8wWSq9VFj˴wnSj~ۉ{ɒ 4b#I9qkso>?Zfa}y!$ *$DcH^EA$id$HAEe9ĥ% Q+äPHBc ;4C@QA4EKhVZg4PRR ށ8B`2kDLT:8l$V ):b)H6 B*,4H'B(0hbﳸLK}O'{z}R.'Pn͙{OwpGP늴|w[1>?>*A~Hvp89ڕv?=9]lLkQ6-'[ [ۅ!/!ۢ=C,uQ! B\D9 [*-IщrIx[19{;$ջtkS&mV$[‚:0x5lෝ'ZrI[?v5t"ӵmJļ<ܑ"WSxt^G@cY\(-4Ѵi>똴SVF&TH`2h$( '[nTYF Lro^Pcp[*P.Z(OR=S-y` }@R >вjy 9ի] 5t["`Xh]TZd) S$?0Hrpa AABx@dE>0)qт-$hЁ%CF&uU^z7A0Ma|Z.Qfs3DbKĶ̫[aB1_n)ハ{dp=jוݾ!b˛M_h} wU]ҬyIR*uYCf) M{-ݯN1E=A/Ao)=u1Pcsr~8Z>cJ'KZ3 * H!2ጅ$(b\EV*! Bw!yh\pB$jn8!!rƯd1(N3"A{&ϻAȯ!; ˽i.IvSsvsSܟ|i1/e:ZɑJꃧ^PM4Jo@)IGG+kaIu;u"v=hC )>F$ZH|\zO""x$CR"$|!O+ȓ ì oA+qg8z'BBh1hh"J 8K<2Q+-"yXxgyr}mԢf?  
mQmn -6@<7(rb/?íџIw )f)Np4^vJwY?[dc-a?YM:Rҕ\}.lM )B,;FbC?Ṙ1fMjGZDq/E:~MPkfrp8(g-rwKisW[p7(b_隱g&O qL@k]ʜ\ @Qe^.wh6rʹfWAU=T% ﳯY}ygo'U)R~R*$so7 TՀEf]' {" oEXbN6o<\RQs9l)w]*YTuRV Zٽ,hf̻PT`!a~Ie)5iy` mA$-+ aʶ]3(%")9IE$M fEwZI%HD04ݟJDrHGZ7 Y; d0ևOi.>녞܌ٻM7LHQIu\5F'jIs` .u=l6N1Ά?ݩ:Fut컷oҟ__/o/^y8װV`l kH`<GziSCxTmMκZ-%)0lͷB(=~zM\ trkEK+| )(*UsWP!\K _/6GhA6ޫ Q>z.s#V7"&{Z#,K 4APphy 6 JS E(ULJ<ѓ@ሧI4*F Ft9iUQcݧVv1TIWS]dq%cD֊/\`Erp3:c9w܍Pܢs f:wU.b^VIγa§DlPWWG-5yu½5ڭ??M>rqӻG-U[8*0((ei{X!g&DutZ\]AQh>HSDxvVMkۖc?G:P7;" X/8CȉR4g O++|).kw:k|Yuo=R\[5`, RQ}:5(瘀c&x92J#"9qGkZ tFȹZBmJiv󋬝ƉW Zѳ,c{fYknv&YjdIB?/$9)LwiPGiG1O# qeu8y€jhSFܢ.ǡ@Q:l Gcni E=r<12B%;#  /i/W$T!9$Oކ[$'mgʼ|t H7\иyyn|]Mg8 L8皰GrP*ڛ\(*p;'Z3ip&kdDB"g{p\0# AR" Y0(9҄|u4DTZq,z̝vX-"uzf!% 3 ^~4lm*z ț?O|V-<cS9PRPզ.OdD_uP J7CesZkg«pլOʿS'ޑ4͹B8gGk8؝vZw(:%R"\8L8QFp ϔf6r"ѳ1RT AHRVa2Lwڔ:`ZFL&Z iI#gn@Ef5eBS܏]^VomMݨ[txOyòdcQok E W=]W]GӢϪlKzoI"76IAq"/QM bx+tօSl`*Q][']n7-ﭞo7Qyf~_&-x~8],zOބ2wUqϵfjҜ5MҶuO7SHJ;$|rl%B5R/{{>A%^ q b{4rOəC>w%p,ƨaSl}mmvZ=eى=XVX[FM@JqÚ Ꝋ&2KxN##QB06jF,rH# be}0z)#"b1P=+CʘYh1rָ|4v ~Χ;2>RC=WB׵W6;s(w3%v^hL;+輆 ȲD-灯 { * 7Rixtj^cJ0O6W^@y-9ۥf]vň sQe9l&\Ys-$i#ĚR}rKH'*oZk'2 dQiwRbdS_'G=|벛 jQ;& c2f\Kxݫ<u˝l܊,-bԬT2a0ظ& ¼&BP0k%a𨢚+&9NxQ >[8;oLmc6΄y&`J%0%=gBaURC9Oǣ~Gp) v3|%gS4aw#R'1/v.HK9MV1F: lp6HF_jRz\Tkn샡ʺlK\1wrcoːݤ܃ nC @@0Ȧ.퇟APdy=βo"z\w`y&mlWi&7 n|sZ0/tK ڸv RDBѭM*r(rrJQ*U29 uV`U"WLfoV!vuĪSW/P]i*U"ɃQW\.E]%j%jJT]}9JnMU|gVW"WA*LATj2u%dv}JH]҃QW\E]%jhJT ީԚRWD!+  u( % SW/Q]QA0֩Q 7 :@߂M8񚔂Yl5(A?#iVt/m+]Ro} t-ě[oҐi9c":f´zNLP47nMoUrq7Hx?D$%7gBPsgQEG|TpKC^h6_-[ϣ*8qz W&Op_?EXJ_~,zW@2W?C%7Xa먎MXei[dJAJΠçdU(Lؖ\r0evߖZrkL1Do^Bbx<<[+EivV3Ma)|&eSهS};0l{r*ysD0C`tVS/PMsƝ2˨xwHpL|)Ea[X`ܗl_Ȓ#ɞ,o-ٲԶF.6YX$'n2:aIHڎ#!hD7 X}VwF4e šJ -ɧGV2F.Bhr3 ,GO痗|עӺm[A!foΪ-౬YuJURJ_*5Z\AwMzϓ -M cqOV'q&klMvz[6_%eWC.++o,7 why?b_1UAvR)d=becbXRގJVpRB1ؚUOUVUR om.*ZvpEV[+,;;\-WdyLU!WZ ]]mwkXgF؋X)9\Uǜ %/g{癋{?Z$i-π+j۪Lq+2 \JYճ4UJ23U1Y ہ+(Zw`Dgઘ;îo<<+f&?O/ykfZvox87Rvc;kMW8Yc+94] ygઘ *~pU>᪬ϺWe/6^,}NAtA-cɓe4UZ>(X观J̞?np5u7\w ^FvD/;^ G]zY.os˔`~)￾Qw˓~KpL>Oώu/;oG4tg}7xWy}nZQͫbUYٙꛥf9h+wSR]n}~L=*QUTLΜP4qD<0-qdzɀo[5(cQG<71 /SKs'JӭKOC:o-i}xx5͟ReD4ģgix4Jg"ae[\oW!Q'~V7?4}6!5Zv|֬!֜u~l2Tb)|н'wgG3+N,<%H.ʁD79$q&[gCr!,J}Ԭ]Wtr5[p0{1⽄+"oTKTa6ФQQ[fSx7J=Ѯn~ f:hqMI ;h6xq* $v7Kݖ{.[} CFgX15 FW8Mݮ[fiTIG?snzV*a]RbP{(/7.1ͩمRy}?}'#c UΎ-+a= u}!!+X0UJFrK]A^@+vb޹jDa B O Rhy=%4UuwcznZ=ª\؎y>χ7(\mb ϻo?_֤ߓn욂nm؋=9-1zzΌ6gFFP|eCZ1Շ!Ǜ]AGcfg3=1-ćliݩG?| d+epBdpLHdC7 YIC*[g*HH>'LtU!l9SI"X2,uL,X FcPJL92#r㹂wAj͜Pu&ONTc :a,<6Y}_vh">}UmўO:9C=D`Ej䬎[P` ʲEy[q9{;$5ۇ祩ó'}]W %aw ~=ˌ|9}f; Mv7U~ذ:PnH]%ZLA2Z#QFgyF|:gDYF˸&!hXr;-#_' b;g}}rsPv= իUFh0Hj~Ó5ǫYOc PEc: \h!o-0ۗ_-F۰OoajڒEޗ->l=*0 OXy!=evZo3ds Y%^`FGO E uQ;c,YY&Έ:=CJD҅) rl5sՄr487 =hh%QXr*a6lMtOՏ(ۗS[o2'ł%B:`o<@u 8(ꌓJ&=Y*Z= +RB(Phx1 e 9Ze_i͜}e =pp.pn)C8)2hsي]VzҁʭnU٣|̾q1m*#XF[#4u0:*a} ktrF'ӽS(Ôm_)2Wi:+MsDS-]u'V~oQ37i{ۘ#nD6n]еQulD =9r/5rZϓQPF9VWQV2}gaygm-sg !" WseuĔ4k[g)Bd8RÞNb2^VCR).HY #e:u&sT,p-",700Nk͜}: Τvim-x5yD\.Ӟ>{I-̤_L՝`O}D}/B(\>m  %^Y|BBy/49yyяxn,Px/#Je9%mK2eQ)b9Q$Aٞ ;}H4o:+쑅u&UYu䖧\R!Ԭg9xGO1D1%SVuhU2әV\4ЕVL/JVrropU5#mK_Yp 6OT_Ɵm "42) ұ Δo oT$x@"B$ 8'TYt.PAFUd1DE`΢g[:LZ&%VY Cܦ=pcph`';=K7H듵}T/\8#to q"½ѥѰ0n! 2ĈVʸ* WE!i/)^* RDdJ1"rڑAYHl` "ЌADI4VyR؛cDfe"V7.Dr(t`\?qqg^ݚ9A xr3>_ύxX캰Oot ݹEͮil~) Lh =)K9K[͙$wOzp\eޕ<Ή8^f;Ofτ2dPAKGNk{ 5slSf],_ȲL n5:`62lh"PBnM%(؇^ûsj PTk_ bĘDbA Rk4QIK$R"a%yk1do7on.6@׵z-Ua'u8oNVvb{k9E嗳Á 2*M?Ax&8 .4ZQB\ddaTK ۑi-Wx/Gڤ,eQG^K(BS /,' &@%6zsܷ*FT8 IR. 
k))nŠp$b\Z#g`[oa 4\ߌAwY7H5Mjn&q{3EPWTaUߥ C\]OFd4 ܠx|rtڃft&8C3E:x7Κ"aKeأ{t+c' n2Ɠ,+5;dwz)6P4PWLǑIN7< jn'SO1;i{sV&@>Y~prz_M߸i`m\_a'e RQRR|.?X0 sZ*3T}(~OܟGH!8IrJ;Yʹۛ5XK~.)Y&w vSWTWwwyYd󋿿ybyc$AA)HkXi_Otk^ʜݮ2/jok\ `UDPUq$UAC TZ\5uf[r2JM2{vEGy^uX4ӮdxOD{cꍩf9A?1P@-p<9/)w¦~osHE콢 4'$cĹגs.n"؎K+ŋ|&jmm|-;/v>4^}zPJנ_r qjHFG2/h4ɜ)r%9 bѮͻD'ѷݻrbLɖbLTý')Fj8hõL.:#lɨ”tݔk)%6 КײТ¦3kr BQ"K*-Laa-q,xOWl%Cskҥm>TEPK;"E RrH@* J3K`h?Cy—{g-qc3 +%u#tt@;Bv3k WBB@ӣi<,=QCh_do8dڿִ #@!K鰁mHTeabR n 14'&)u{Fj]kP"!Lp8EG:)d( ͑6$v"oc\3 hVC_:?¤@vϒoYQoSFWBA'.:Jt?fM^,`HP0I?8nϗY`0}_L+0C0ECtx!C3kۻ=O@^Ϋ'pb0 VU P&\:$Ux\Moa޺П$1gO33K: BQyps4.ˆY B,mT ۿ?4qijMaLFK@Y0vVVO5X~.K煉yb$zF̵ۏe9Ts$?N_Mn/ߖɃ=)t iab9?(`Ŵzw}vcy Zk=Ȧ^ƪ9w<=NPH~0r8d%qq6(~hTgX_g;'G}w瘨oO߿O0`S8l !l~~?n5mu ]S6Z9j76}a-ٮmQz8] trkEM W} )(~*s׾!L4K%_/VGģl$U?s)$XϨ5kI l4lBP5²T@$6җv6"//]t88H !`AAi"&AI pSrtbEtZ9)Qkk 6@FO=zvX YtϱGt ("D 9l݆1`FJDF aFk aS.Mhٱ!ڧl sVDl 0CJf/X<[kMr+307#fOa_HvԪn_:PÈ}zrZ}]T` uA/bA$k]_8K|M+ۈc&\Ys-$i;_ mR+ |3yaVʖn7!_hpURXlJ Ǡ!P02C-/;Tg}ބv0-'L̇hRY߀nғK;k [SQj:T}CG-E>L[A]U:nR& :; t2d3ŦݸOޟn 2Q)pʈZ6X(ߠ5iVE^7I )]jȕiu$k}+߅.&ru&:AV[LYƮټk7jz|Wus5W`t}:3ukfp;)Y>[rj9F=[몛BśsPl}/~#@6󾢽7𥼁T!>VR+rfυ{M9\IED/}g)d=ġUEwYsҒ.'y6-e3B]!jm6NSUwwۯ;_T] bxf2ǵid?}JcZFf/mdP/]Sl3XY: n)heOLPR䥴g!L{)K67Q{L Q F)c?∬8.^:hɓhق k,h-6)ŝk*&NE R42%c(m^60U/s`,\𪛂C.mV>ZPW7\laggJJwmmI~αR]e`2&OAW[LiDŎ~IdQȦx(u;Y眪﫮%$+9-`_϶DݺI2f2 }lKԵ袺@uiK͜yK&V;iwD6y4*jj1P[TPHGXU챸*cwWUJ]Ew W;O⛩ORc GgC_Gn??y)TM5ue@JC~|ۅ=_&__ ߲HR ɗ2Ka#;UvݨǍnnғcǩOV[?ĠxC}C bEh˜agUbi}0J;;{;鎓U']%:wҢrcwWUJSǔG㮪ZҎ?]DtWh<*wUvǃx4JK8vwRFqWd4c"U`::@ⱸ*-]U)m']уx//>Ǯfu&L'jZՒ~W?ucMo#uMz:r]ٵ:?''={usd]ync3#מHRHcbP$+Dh2& 81Bm92(^ P{j/@^ P{j/@;x^s+#BM}x]g}x]^ׇu}x]^ׇu}x]^wW ]NY6x6׶ڮy}G9k}Z=cP5ם>`>`7iBۿ>K`T-z5G2V5ev` 1" QyI7=@sӠ!}a<{+~Kyfg͞mn!. kD)e!,:d_@SX&2A.w7ĦCAeyX'2;O3󋺠od٨/.9?9|g~>NZŷɚpH=={V~[SڷpwP+vր;{. {NRlkus{V7\խo DWEv[P혀< 2!X0&;-U&P-Эr*vN)UYj $IZd1F+.I"57c88'8.qLMW0d{Wۗv%Orڭc'"h0ޝ6;2`LAGF#h"-xct{Hɐj#`TՑExWێfq;wW+4[^A+?X)WO[= u0ƨL!O Bed(3ƚ`B*WUƈz;&'^6J!԰UMs`?ۓth>i^Iエl uK LI%yʪh5xv³k:ǭr}޴3x:|ޡ7ooA{7Lޙl2rWUQ>撎!eLH61yEaNdց6] c i:]R%%q ~Sm+w1{V?Fz|s|,m[v)Gjki»b<)Q␌|I,!HA1>?)w'eivY{i,!zU`U0.brtv@օL MIfb24Eۢpک`#d#(dRƌȷ3kTb^c{y:w3,~_q#&UW!˝dެ_Hszhћ.hc9`B$( EF(jl%-thCm{ X2ITJ$ !xf3MYr49%HVa!HX1rL`s*&!$),vl""%Ae.똞!`|EC初u&FcaRT@~K*tBj6."I' h\RnmW3'q\6FoƤº,)%0M&]JQ(DYo#1BIv]X)NJ^Z 6*m#j9bL}֨0Q($d_T(gХuj3qF2Јrh3Kzy%H)OG7**YZ>vs c#t@*?Z߮@fuZPR o bFPz>eZb7hNֶ%d:=%#r& )(')5(A ( d=crxsEetuK㭯eZBc%G`hhFKVq;9H,-**%` )Ju\9lTL?>$,ڭeڋ!wʘ(S$d7:#"8 .ǐ%DȚ$RLV(ډ\RVւ[C: ))JTĢYZSv8L;?r\2+Ϛ~^/5hQlqfL|zG?w\kp0qk4(`X ^0D_6 S= rOi WEd.ƈ*Jef֔*RLl@D]TŹXf:jڤKTjP7c/ hI7**{Yb6 /\W7g4ȫpyu1<+y>:˞]!~Cw}i_2%A)_e4s .6\'*MV &dZp$iJkZYY>=z*!P`^D=UJ3qq֘xث벮W/}P>) s=@9:VǙEΞb(D8ج TF)P(s[-c9(Ľe;qڿ'{c?}>v|Y7unĬ`nFc:Q%\u0YD:$JA(E{%) Mѝ7/XlQz>š 7=Π/?9Wgq:k߾7w^+e]@(VQ W¨%ZX@]-%c{x 5;8$?{F{4ǝ73G=?D' 9z  A_eVeV):܆INH+/Y J(j!q♯"II7J TЩ"mx+P֜INΔt SlH1pV¦o , <M>=,)fjgҾ<-S%.gxSXI} F)7ěJDl0Txfg0ŖzRO<Pc`#(Zw\EsBEC0A@ʐ@?;9|;Y˿Mޛ~ 3tDxDu΍<߯2ƾ/oP9[Aџ(E;hSSK:m#Rd1 kRtAWt10coDWVʙíx'jt . 
:\=ucj~Q\1 |~md̨a{?Ym4W^Xm\ҁw _0ƷH\խ}%Vs\Q%u^ޚrFx6Cg3t@au{1YzE #q/DZH\D:]Ɋ7g&wXɛ"D՚1`x%SG0=:%&L svXhaj(>-{^d^Ub5zEY t*nw.w [76vSd0bӝlIxEhnU优8W#UEn&gz 'NqMzI|#oNi*NGW}V;mlbsV &Fl4 Ey}ϯQׁ>}⏀jǻyLP^k y<֢,V\yryrZ# ڭ: ifMzu֔fBVN8}iͼ$ $E5WZP+&GњXs9"na/pH\][fk9)U)'@ԋ@=@*0FdMT SJGW@p&Eu]տ 1Ю=EvH=c]N{`r:؞h)nNW T8[МVI9;w(,5C!S- CBҽ= :5Sww'M(_zHh m )b>RHb_ [!8@>keE=DH9F41k\pe-4t8g!h3%'h.ߊƴIn BCCv۽PϷCBsފpC]a%AQR|(qUR5c<Δ3L#^G[) jQ`J@7k$5r|Mն pME@(dX' G!"fMD !__\)2j[.̭g_󷤬%feW2ŷf5=C{߾.%M%n/D#GJ_vQ6$hӆʿ'ȯSK _~)̥w3*%zCRt N5^F$Q^WK`j"Zd3A[UD3~7I"0|MM#\A4Y[UdE4\, ͈-1O.t]IDV؝&t#{Ah6:1Y/^/6dmC&gIB<*TlۇTFat;pe- R,U(q䑱J|tD8EC6L IqA]}6|Ri#[ 7g)|?tAyU#Up-+~Zn燷2?O?=|~\{zj˛c,+' O~|~{\8rzҢsDȇO;/W_sD`m"ݯO_><eF6xCw>uy{Qm9s9 3#ZP0fFA[|ѭj[P I=xs D]MCYʚ;KYsg)k5稧P2iE;+%A*+";gyE 舙2F&,:Bs@zv|k%x $KJgߤhA!6>Lܙ#qfH_GY3B`tF-q ]IDFz}'^83-Xu`?g\N5qbJRAH>'!C@06忕D\םgL~t^99Y -;.T^ {ekBBk]MTB+g:"kw錡%(A[ޫ }`^DeҚ0.Q W@zըsT+u<B=Bї8OQ(r2ټDXoP3\7oLAY=8!2cgpeHZKm=C,]hy7PzShp˭U8VYBћ'?B ="JSgxddMM0 З@љJbm ,dtu##:1QKc 2 D y缥ȀGL#QdI҅ ʈvj31q/E߬CkRJ/DnFo8)8[pZ:R4=5 ѠuImHEKqvCZl1M HL7diR{ij,P*A-_Z2*l\0M^饸pڻ ˔$4Q?&ABTlOU{RɄzin`4evj|lxP-{4O 7 N큷-Ah͠^pR%ApL7Z(#5&U8cCeeLs摏 /ܠ.ȪL%A4g&Ak]VoFhIrlCcW^L+[!~"FT3C@U yZã S.XW<~zAJTOIpF-mr-?gf@TJz2.-_xyksT4_{% c:vHu!z}uVYb.U/^zʠ%2Sx9yq^>bHFH= @m3͙ꯪS3.|aޣS?ŝ;Ȣ;]8y:8=ɿGOd/v+^ѓFYzNZ d4g?>; 1fso~85gC!9ws%;"sfυܜpnù;@#ϝp"W i wo" #i yVjOSAgtv1{J|?!w" S<~| @pq׍ϯ&K//O.M?A3X ❿>\,qA~=/g?G@؁H`.nޅϲqf&?}}\C~{Vx9^j}>Ͼϣ/=}x;xt拙۷b\O~2@&o0lFL@A. >N.GǸ% ԏo3 V|Rϗ[D KsnV.>/MLwҊZ/7owd~PŽ$l4z[0LVӊ2Fh&? p+ggGM(u -=S )]3qQ3 wBёJ#Nri60 Ǥ~st}i FqNh7e\LnxY5BK?bּs3JӚsT rFۃ| IhϜ@KTJB0-yuY7co 6-qsO@97$`X^Q.eg!E)6 pquΙ8zJuQml1/R&2EFL>B\z_umpv6\ WŁI'y,Lj(ʭ J°(3y/gbɭ !S(\CI {.By3Ƭp4+tԹ2g]Ε:cn~݈Ƿȼ!{kJ0vv1`iFrR¹%vv3 ~_#Ib!}xlHpvzZbJ(HF)$΢ >tӌ )$Ӣ(5}Di_pI7Ԕ<~h…ڤ}g}kҥed5ЂNM!){15︁<%Ӛ׃PfVvbm蜠ҽuK-v"S r49o:3a0>Z_5D1!>n$D<ɓ4KrSf13׼c!7yƝ PIF:t0m=LEΙLj1^fxKR}j}QKMmJM{:][)C1%t1%=&y=)LuSKӥ3 5g)A>TןB:)5︁<%Ӛׄ I맓}Z͢fQi&x"`V_jTfaZIk[Ok-*bCHVۃDF;l%?֝ګ-o-Zw>&{rD; : ZD4N6jEpx"8or@UqPw:Q&zCeoqRQ-u{Ws}Sef 9P@YXWȜ<JVR*6^*NE/ *kc-ե1p/LQL1Z;+P1vZJųûZ ~$tAяM[we)2f')֦|[|yЊ?fZ\;֒^=V"[yw51ԸJ,Jp+qi5g3#o?5o%;d4QU.X$m@e+BDAōxƥQQ=9猲A(et{d$UQkѪ'vjHGm5*#V!b6k2<0NTIlƸϰsFqiM{- `LLǍD!#*oi-!YrD=>8p 3Z/9c\;%- 4ìĝx:4@,XRhւ!1 65N<&$8#X\>N3&S%' kd'+:dF{3f5elTј7Yۘa^0oØfi™ >+8Ìs8b;K'r5$,pfw!z7M3U6,LQ^aYcJEq" e6;6'GO> 6 ?x08\cfb VXr&~|1@!閞xB$+p4.߿o||ZE+78PA;}u[SF>&L\[x$@3k`Ăb5v^`&US,!4=1NHX, ՌŨ{ VrZˑӎi90d:MĠzqIn HK6F~sX 5@ ~)XAD 6.WHX#>V<`MG $*ñbP 1:> =QF1C-ؔ.- 5$ t)"R}9Jq 0zhLɈ_:qB9]3\O??>&(ƃ' s~`00'{t_ܧgŎ~̆ 3ѿѵ)$^[/)mIvowE_fAjnugO#_blK^{gGt:89oO:̮fLXpwЙdS?^-FR6,mEBɷdn"],XIR-K4ff*܎];24CM+H@dԎ"K<`QҢ-(QοCTv2|)srcw !KڽگI]R=}-]Rڽ/SQ( lЈ5({b~vPGQPL(݃.I .7R>=S f|]E׸,f)hdb`lH}k Js{ӷ/r#1+'%z' YI#fE38,Y3if –7ܦy|镔Wz`]3ڶk7`D;fsa᳆XTnO~&#Ckqh7ch-Ɛ5 桇 Xie$-q M5 &>(,:94|_\=ugYrZc@K m=\82dB>23G%JBV#3 nx̞@P!Z ؃Z<٦LAtCn@v^dRP( 9Bca:8ŵT NqS>\(|h r8R*!c=Q! ǁ{ kpFЃoq̩N8ZB)ÔyaXy2 -$}.5H=K/R]!t)gy I<A^υVR"T%W\q$<&]y?Ejw)[ۢ (OjwL / y &N, 3B40zo1ҰKn=$.ZߔMZoJ֦4N3]P*B*˴zb-/W4휑+hxP2J0/.v%$bQϽ"&7SUp$m@`+>L5Y2C`P0=tq%* <tl K3lȰq1RJq-%[wu$B}IDKz+_|V%2%U'= .1נHzSfG'GAe9؊[`9ң? 
z^o=Ғ|qW!\b.c|r8\>*bbFpjj~BO"@/;̦>(<)P!PKc{5 [Dh4=TgXvsہv-5VT\B΁7>h: :Q.۠ɻ56|K3Qf9 ˎ:tJZb5EmҸ=mBrFCsݙאJ X]VV1l/Vd@ %{n>,6<|eHcsmL%>( ΊK1CV*{0Ԗؐ썑i )LK>VN̚w<${&5 X^I%;ғwrOe;K\#KQ I%Ӡ(i(\8#NkԠk^1@tEObJ~Ҭ0bO%.v#գjQ&G"GDa\{O9 n>0w,v$G/ОtL7b{P4kU|ِٻ(lY ҄ Qcx%ncEvʓO u)op L6յi0#WM lMpXpdņej%hhu`o}-~ũrc]tm uJ$|n JS "@cmO0:Uǚݟ`(V 8m{iS<;?&Si.\\tnɠ m";vEg-yH%h 1._=U B ?vS2 @IR9O- iV_1PMd(k n$>twM|{{Hk 3ςbklx8ՏhCM{`.zHOKVi-LQYr4wwݠ0#yRKaXuXwg-W(B"Љ0T-+g?c5odH٠%{S.y6659͡u4v7Z$dlYǃҦz 3nJ\1ge=;5PbȯGm:/lrB(}̸/tqǏ+j o^T}A^O=Ba.T p+ z+YzΣt¿̍TpMRQ-W Ϫm1ZCZF$R!b'm,ec>OYO7 u մHBut|ෝ|w%m;rJ(PWOnoPRe2EE|hB6|Suٕ$L^?snfq8aD[L'y+Mk`Dlg)r>rs։sZ^ˀtǖRT*!n ͩ.7#ت\Jmjb?*q7m1 Mjw Ǣ/)=j:܂®ֆ-TsOem7fv])MLtPՏ>nʿu%&I~8 U(It^/w: ҙSlkj8kXo03-O] /]h{'œIFKƒz( 5UmCQ \//-3   dbXXW1ܬ/jI@ĵ2=(b~*x% s.'*ZgmU*AN$W(Mbɖ5^})ŬKM,>N=8>ZAB%H*$ Oyy|6&ŬIug7"Oh Sӟ/HWf>^ŷčgq*܎k\x5:dңޠ[QL8mr{Ո*B:K|Ko&uC6<6O a*Y>Z_>OL-kHU6ZL I]]U "ME/i?toaFS.xO˿JSM}8Ռ3MD+ID5S"kDB%ox W@6g`S\֜ስcyDue24HZ$S%չ__WkllBKoX"\Bѵ:7bANloTr^6)$F~O ┰㩙2gJ(C &ؒ 乔D$[xdD≊Xc#jN a-z6stH5Bq6S-NWlL`Ĵd {N)+bo S tҚ%QZ}/vF_3<ᲩF%'km:$ZPމFo)$jV7=r^ M cS*4;z=p8BzPp(5 a ;%R4 9_a [MCҺA kygڥT KQ0)B+t![%a "hM 2i.+N@Ǐ|m6x~ݒ>\o@r0A"!!(=22B-g?S4shNSιF-3(e3s4LI Zkw_*P1E#.kvwV*%N'wg?{>A߾yw}#9Ϋ߃DzC5phҐ+wTpH ٍk9' \@4aI-PRcBdUvK,goBʁJUB0 ]]kKBU!(V<< R^P]+4HͩMK5U[?C7r(*23M9^IW5^DE1d}e-w jJi_bkFG46~77iݹ~'iYr!Xf ւ68!ku<iM_I-Q҅ĝo鬁G)*&(cM s*owotl|,m;8gl>- ,y:~qw8/pJ)6ڧ?GnNF {7Z2{rxCyH\2#͓}ϊ^Jyzὖo!&.,i'9\RRڃ_Q^J 3u5ܼS0('J)l4g6){2oLlea: 3d+ D3_ph CآޖE)WG_Bw>s2Za@9$q-)s*dyi.UN2 v6MRm*CJ3'c ϘZ,IFsvٖ؅FU:, ".\_f,k׾1ۗSM؀q9 TVDIKҖ˕ɳ }?4Vd1PR砽!1 թMsyY.=ĂNW-KlwV+:z53_MJ PHϤP(ʹW&)a-O=".w<9xQ%,ә!R *'Uѳ^LKDp`A@d(L82>ԱJ]gmmmV =P9ɜPJS 7_M_;-ͼ`-$ Fs bvKs2ih xOi'lR)se$ =jD1bw܃*E<ɹ46(%3n@ z$&)ת٭QuwZ(٭ )޴1=TWP#u1gHM9XrBTjCr3S zȡCu[./Qﻕpk_]VъVUQ%4&2ww9A3jj]Ao?=6x R#wQ}¯-ǟ/As???+|ᡍwo]57oߝf+|?Wg)MF/f]i؟]tn-hPx姟\REwL.v?E? D0Lmoj`lTn8"֊ٶqv6 cT)qqPhs&>|BY,//wv>F|UCQ_]]TS".ªtK<:tns^饓 +ZèDQ{[2F5)m;5÷WqUfԗcl=WH>:ULI+aLdP IʵU11n?/F#􆼽#Nd+·2kњ[=rɁ 4Ћ k|e=ۂ=W;NrbR a06(ڟzc ` wf_J Z  Z)Y.O7u(]諠" kT؂}Fp'tn`Gt KU } ex-Ebj,z.l?b/ ŠS&;``N >*g?޻ E7SG{{Mxq>Vׇhy ǜSo4q6DI&~JɃ"!SP],}Ͼr٦?\T. 
&i-#~䍜{%Z B>r0~/c`J5%g TbJ °i GxSM)cFr?.fYMkȌbմhH27yng?yr:&&תBŃ N6.Iz8GHOwy=3=_酁6Hc~ر1̿I ǐ֭Dv|\7nn]vXHuMsؾ琔nɨ$C!PH)yRdOAHkTG yj'J8(%ģ^IKGMVz̀ &sGjdrW8nkPv9M:XEJ֑#n#U<,Yw4f`ܒFĎ-]_51L.ԀW^*9{bQ4(h;ð3qەԃ0ӏȅO{2 k9 +OAk2k Ht̺X$t FlEfȚ%^9yap7.G"i*MD3I3@YU6P2ـc2sXyֿu[Jut|MK'ON+.bc2[ we3@+^hZl7 B'+j\_90}/qJlWk]v<|y5|2+^V]b\nJ \.8\r 46e{7|c_NCmeOAښ٤h 8Ʒk OhݚPlV38Jz G Lrn: 兓jk8M;$)23"x\F$dTqR&qt{I锣VR* Bڏ) |2R;2O1/bΩ5sFe4αǛ?v 'FN-Y.q)Rlh u[Rqs=;mndyy k/7n]aږ4Öu,Cu!KUe"i.+E#D]:T2^j< ZW״u[-n0sDۮL `X5,2D:Sr׻׵tJ4N%6z"ˮcaȚ9 2oP[qLQO #3: F)Q̊["RuG_$ 4鹳3V2Ϣ3oQ.-S ڜV 2<_]myRtoʞBQ7ߪ<ds *=I ({ =eBPÝ[۬\FzR~{q쵢af"kE.^'xn]+rgvlZj]hZ1ѸzuG֊B6Nw_Gy:H'PwsR2 1 dY|S#' 5^'nՉƕ:1DBڔTXY |}]T}Ǔq>XF_1!p4\  =ӱ/A_%3Ѱ!ssv@w=:xE݈' Z.*Wɮ90#T9 ɡ_+\UReH`!"zPlLm-ȋ@.A{ R]9,Mн|M߹W6eV BR]0jҖUäGJ[1}yus˕:~/X☍zӫ m⅌/GЋ翝z'a#􆼽#N2>fEkXlώ6?l?pڢ6Bn"a06'zc |0{6;cTUlaQf<176 Yxǜ$бGBJ!CMQ2Rtkg|cJʽ[THt}M~Xz]( ?EqVh)ɘ kYV:4R&.$Au]L)-@wߣ,1}'< >پkKJﯮ>WlJ_] m /yRqL^r?&de}oZCn> Xi9ѐ4~"6DE+Jvf#GVS')6FDnI,CL-ZU `έ(,ah dǚ#"=݊z1ܥh ^Cf?LI4`Qx݋K]W`_h,1T5#DtI.3\x}FWSpkIYlB50zP[5W쳰+rfu}.b_rN.Dl= Psro4nl?}g˹/u[cxοxH1X+ΊoSY1԰)+S ifZ` a ŏ?RQ%qo<,TܑXٿ3]}Y@@׬Nuv`;@) p  mAP: ^ө1py-pM^)@3\^`ۿhaՌ(3u߻gxxtWG}NG~o??7<ݷC,N@:~HmwTdAgpDyuu%n﫣K,{wq~~Y1͇KD``Oisz3![ⓐY;XKBBk}\x{;1B}8;Yݘݽ.olyc>̲NUU*V ld,إpY#b-S 9:LdC jfRbb?{W㶑0X05ڈs|H-XW$8V:(Cĉg(,>OuWUJ(@s)eM b𙶥R.UBd < "jzX?:߯" D'~X4EeP']wgf&bO\+k%vޮgJWnрdaZmg*BdȔZ Dt?2j'=%fB7ulì뀡~ژ8hvcܳ3`27MQ?YRcb'麾H2S w7+WgeERa`|Ӆ708WЙF]eu1&=N9uPBZ{OcRcNcwuo0{ň\ &WB#0VXIb(̝ f V\a`1$g\١Z9IJ#7OkZp41S14ax>Qs„Xk*vR9('DH0c!B_0lD7 gP#s()f<*ngvo+ 6.2֛~8=/5 (!@R@Q]|<&s^:>K#xP!e^OoLgoӅ{ɟBO;>?.Gp0O6DI%_3Zu1(FMꆼqZ-#LҧO uޡ>`L,RW!=Ǝ*wD#Y f21.b_aLԦ/\k ^XONƠ,B4X*8(05;;n ju}HOQdT㔡Db!AGSA5B1Iq X1 D#S?uuT^c;L5q(Ia]cs=L.بWRVDdA)YN\K k@"RrQRJVHB"I.i_z ?L.!0TTVP" Z` AzF lBc "3}ᇻW lirhjgB`gPɉCΆX89ql`L|t;0s# rX1gG v("}U{7:6.~^vtBw{ͷC]SmMN8# #V?gw->H&aoD2E0gt\UCfbbDc3ϧa~TB82!k 7UacHXh(}ϕT0%h$De@2ﭦ8N'A7Ì@R}Ypxc=J_ͣ ʏ"a 2~ ll*1ȘcbGZ] {;g--Q6{HY|RWx Y!l~m5``~`W,[-K,K% JMcYGc+ ad++Iom!? cE8EP)*n juR㣴n AĻ}԰:!y-o *)zZ/aCD<Иa^ę]-{6)ݟ?}148VQ <4廉 Sjt㫻Xyn}~7:~GI M925UqK]ٍ/!:WTΨm*(\~8e7?-bh˺ejiUm׵O+CjD3띭p]j?wsN@ E&Xm\~ydM@ٍ8 lm IzcOZQ(HY 9KhV&MԉE< iDq9']ĵʬJ %']9 Pxq* Z5"^!Uan;S{dcV7f}~]K`~""p%T۟P٬~d )x^( ,55s0;C܌2YG!ו\0RB3!:4ZdK1,%~i'-)RVuȯ=\PƍzX˸s195WuYF65X%#aޚȱ2x򔈙g]Lؔ {qH?~ýޫ3t`)@ʞ{3E 3Sxٽ9Pz;9r,rk+yW?cfE.2R6#EC'Jo;Cfdq3N=7.HUj̝DrPѢؗb-U/&RyW|x.tKO\R .m4uυlj.9Q q垯!U~<64.J}5[ ׀\@Vn;yK `j sNPM,qQӶt,-m6ndxn' ΰ7V%y8>\y{.Ȍwk\ )-vیhՓ='y6DZUq,0*Ps7>:H^d)\d);Z#EU4cvM0꼾̼s2OJ j mbJqXW (oO$i'ps-*o4*)\ʗ@8MP+*\A OVL>xO^j\t1+j#Hc 9U16H`*&ԄIf$#aF ÒOLSdZs~?[֬ .Vph4?=ۓG*zÏj'T82jU(K!2 |Qé@HR]ч? eTx !RK'W5~Ԍ&Om .:c>F7 ɭ8% c`Ox 6"(;,YsQztIg $gQڍu͂پSstnISSA (?Ws'mcXy 2V5uٖQeR؝.bQRkbkP΅[BRq.DB/9yqFi`d\dVj"ΐ-(6BOʠ3FF}+2_}Lp)Uרؑ k nRwхYVn{򍳉3--hh!Ḩ—] ^KM{P%\"iK&Fwt@|w%ƌ6#5ڔ7a0m4#;+ns4(AXiBfxc qOSzT$8ԹiH4G‡|yӇ1p2E d^%RA`'b?1CLm>(vwxso*x[[81S霹м5gt3D2֤~VO/yDh4Qȫjbe]vyK+HJJ9MBh:Tގ^gV0#H]UѱJņ@>׸/exݞ-j$;ʚ'Wi]}/!GIt&ih~f`7QĨ.R֥Xا",J p G0 T1KÑp!"6#V:q\AÉ9QV*U9#² guN4ZѸ#591D>݋ _O{qTa/)eY4I#,q&Tk.q8m{gq^Y\]"D[VU-nzyUbP+ν4Ц\s~/v?GB_Z3y傌,a.~Cv>OL]yH+QHӹPHS%nU$w9R6jGuW! 
?ZrB_| ?d^T-UCʥ=4DHa0x51mE ttzw3 mûs+-Љ,}`mu+9&͟r~݇|!~6gxY7/>̦鄜|oskگ:~^go9|#_>{9z__ýJ|fY7=Rwoя&g?t l|45}qoL}7I:a"g/`}}~7kHArɿ'~;Ώ]*DJ}>G, ܷRwdo-{6&dOSLǏ}{3sC~!qa3Xw1tLtt ƟOo񵋴F aYoӏχ؞/|`{>C;Ǔޕ#owv#a`0G$&ȒV3 +vRVj>XUXUdvR11 _'ϕ 4Ư/W>KnHd6zv%7ΟGĠM#zt pһRd6ʄvva2s|=xąAAdvˇAh=b{ -zp+\L/>wd fA {ݏ/`sj`[bU (#;CW?5_L?Ύz_6fRc{9]^K~~vb} J?:{xË&_&S7P`K0FG@$LI>ϓ,QzsTFwqoN[4wgLɼemRF5ԴT篾qpfhLzyxC4~08/uL=ܮ}8Q?j\w"V@ IW#dQAfwW^HUP[ OMgT l)q((6'yL>F#4w.fN⸿O7 -{pyrêsª/a]`Wyhݡs~m Ö́W@c$GA":K$Ofv9gF)NA 9"bWG +NOktr3l  Gb`1NE4 _フ [.<c7:PW~gxgxm6=B.q;rq qɣ0r1-ZI?&`h``wߟB߮PUylCW(l00\g`d'M6Gi4ѦtQ>9Ÿw4+`j1~_sk^Hoӭp" EDDpj EJFpHZmh B JdMŝo1e6*|yVWVJȮc o bNpKYM0u<()B"-DcnH q&@[y, CJ2AG3t1fEBk2 /gP gbUd)0DȭЁ<@7nw }?:ޗe) ],kc{ݲTNJMQƺ D(y+2JĐvRc+ \҄6Dq+$E~iV9 W {%]0IV_!br6 7hSې,3$B >کHadF^#Aa 8QT`琟q0s9WlQP9uHV(d2% '%c 3kfF8'F(r> ce:RDLaX s?T.4&}=qh@[BweB` 2Hi!^'CIq~w]QE&3ϯ* iE7ܐ7twL_{׻ѻJtGHO3~{x<:)]w*&?BWO~8~?yΡz3 ~OmsՐ05^>].It'u_}-/o ht9e_)%P0J0N5&Z;Tcl4\Ёe6;d+X'Q.b-F8N U9C9(18a%3Y͕M0< O4p lC΅i~ؤyf./K gڻI)*5S?@W4FȄ.K-֤Bc@Z#Gq"0 Hr a4[F'`}mAp$C1ؿ #Mx$eE@;pQ _4a-0te'^ٯwx<ב&"BlE%!= T{W&>$N*;om `Nad(X0!qQȀ:KY(D∩X ]>ś*!}*sYت,=-* v2:{J:Vf)2.0e@D \e HqVեʃcEh:y`Wydv&ܠKEo" c["FXGٰ?ŅР4ҲQK0nS"\uM+cf*cq/gbe_1[˾6kau0me+U6hAV]m6G0 i%)yZO^RjSl!҂%m؋w6~l/T HvdMʞ1G6g!5]?mg^gz=< 7ЋbA2 [^dTrq%=!f!o'/Rؤqs?eseCvb)Pɲw7dcj9ʘMcn>-6-44=(eͲp0mxXr~d3c.RlC^ _,)Wy\}w7ܬu}&Js:|feup(]^NFQ8:xap Պ¡tj uB*haB9L0qzg)/QayrpMf̗Tp3\߭>9 P *l*B&tW(Uv 1vgSo(P!Hd i2_W퍽Uo-E5AۓUJ#q$wRO戠+@'CRDq77wղ\˗yAΔ1Yt B`Nþo 4QezxufsOmw.Eg{kA3L&_4o0# S}BVg{8l /Hے1RJIz㲊N6eQV6>hh 44{|4Lv.\vOkdfQg{SYd8@u4#InsZء2Ʒˎ{.9a<#ϧilù$k,]^51)uB)^/FS}+⢮N??Eք>"ixȭEa g$ZƩ,kƕ$z0ܗ{fSOv%͂Q`z{w,%,co,yw'K " VS8oǯx\k;U~+Q>=iR\i S4VXkczEܰlipŃ)4\2S*i B!ڤ*)ۜtnJ*1<򴜲҄v 79<*.R[2!uZ Z[8B1{6DAqz2%0D3Pag\rby ;P 6/PT*R+-RęL*TT\`x@6Yn3_}@:{1dlQ&&\8EIR$64&@bi%&BWtTA5ga˚a G}3`z /=y7g qwz1Y|Fv2eJ>?xNsӭn 0 J*0p BpBYĤĎK(V&0?ov]70 6tkI7n:oӤ`fӋK7^Ji_=Q}ܸ,]hUj]V::9bSx"o6ڠk.JYq >:hoan 3, NɊLtuӓvDvtƊf}Ъ }ܦ .RJ.S4O}~oeu:oh$Q»/6Vl ǣ^7> `esy/rz.m$NGM: t Kpp7HtlJ% "_WNf2OCy.RמCd*-;$9&,.o/0.t.h|NbÜm?esNwOm- {SΉVōxg!QuY@X"ܞjP%Amh]rL9'qIK <.uEZ~ md+J>8=- ET|| p ;}-PDyZnv+ª1"uZ&mjV˓?B/&)xCRjDk.da*f0.jKV#E۴礝_P8W#婌%mf2bvqPAD(fbEMLe#a scF_!tֳlBY &Sm 9 ##3f"]'H~_w|V>t-I՗ ǃ]8w~wA*22ѧOs<0RUkDdl0 Hfޝ],UiPW200fz[^1yKQ]CT]KGX:F"Eټ^wxuyubB0XF:VtqvSm#Y j&Qe$}{t5UNՅOK=LWg~PWK]oЛ|)G@tەF-pAJ<_^k× לy5 +1Zj RGxt:|޻6?gmYOP:0==^ rYZ)B Ly˟a_YqRR&a(&c"o~xv7î<î1͛v:ch GuV^߻t Nސt ]Fw`,b@d!_QdxߙSA))lQB)M2cGa `$@`bͰZ18ո^r#K<{CTy*ZZSGLJXkYC`g[Vї]1jvR=I蜸#25R:e=@ ¸mΥ #"vuzN[I qDXy\Ӆbq q:"1Zf̋Ȟb̹NY:8Q)mőϗyb_hTq~}ݮ麓1^4;rptg+VPJ92&F9KvE`YleMln쪁I »4{0m9"u]UeiS)[^jIxPяf*"]\$CРaݖ"7xjcVW׋Iإ3Y)kO1ԃ H?=-Iuq>*UvsFVr=f/7}S@LW.ZTe,֭ݛ*Tԕҋ:I\\6x%bm\V$#YBk(X tjdLaE8C4GK2)Bƭw"4$?^u5(ZrtgQB=NߐOC !y<%׭6!NAjpe!B09u žuw"AOz*GeʳW3߅4BP_O@FI*cw)<\ &ѩ"eP'^f&#wZ,b%\>m J +RoLqN_iO h~nCެ~\ĝӁ}5Jj\U1u S͵L,6Q !`&8FZ:.ax!@\pZ-4=k̮&pHurnpjzjjҁ K΂b h.QNL\NQZXaA(R@wSR *s̽_*)T;WFJV?9FeYϧ,L47Y?ohD<%Dc};\XSdf81wVbaX\ZצV3. 
_-v=yl:OҾ:,XEdo嘣'_-ƃW# 1ɆJ #iH 쉓Qn *1ꜳ0@*XAMIC:+^FnXS#kNW݆sQc"%ƜsgU$K1MJޖW=w(ZzPbkmBlO{]gzCqI; FNJ`(ZQX>,||ʴ75POm (bzryq1'%,ys ɼ˷kA鐁,"wYY]&; [ju)iZ١O^zZYU_N474Lu.wb|IDSC^ʨ+DQy;kE= =gCҖ@j`ں7zwm= }mI讫DyB!91f/QKM2Z P Ae=P0{;OV "[ ѨTjUӘo=H3rƼ2{+A 4&P &e"x,8e5 I!bQrL!!TƵ5Z2~B Cxk'rl] C BRkg9[[1[CVWgg¤~6:EA48:EWSݾmeKu݁#qoqRADYOiRcB|(aKjYj3Xf4\GLOkQ6:ZQ{vz] vo.!1֯U~ih:J@oݵ/LWI^srU̥W<{?~o%jL9tGU (rELڒpmdS~%nwkA4w;btuEoޭyLֆpmdSqn{7Zbe: ǨݎEPZJ;nwyr[ M`RH u{b T)E` $5DlzP>l$J}-P7j[Q~='wUt_F)hgL5O#̚Wr6o=Ui%>SdkDc'jJJBG^i Hu-Vp)$HGuR88U@p ʸ,TSp8W+{ˀfw'c; qcC pѧ~ǩ^kUy0p&=:NMep/qLJ_9/ W }4u+3c4D%Jl-,,(\SUdȣԴ!,yTwj7erG%Qyg%h h7 R^mm&$⺛ [!KslY nE1s_좉nntbl/WDZY#+2J$鑑~-"a`PNNJ*V8” lW{+dsԌ+[~9x~ xCFkȑr4gSVQrtA%:gNQs_Wt=҃N{VEJA0W (*FPQTz68T 'WLt,0Hv_S*z"}͟!a0t/'YV1ʱBN&˛\G{y&re$=: r߉mmNנ=~A`-fA8EcFgxP6&h%EG,2- %Tnzzzz^V#Jw.8ɽ,pjiˍK a!I I*Ԍ۵1Ǩ2$#.5hĿ*c?d6TV&7 c!8 Ĩ0 +L k/&!xc֞EjoRh1R`Ray!< KIOIb+⣧T0y1]8wPFkD|"ˣ9 U&Q2x?tŹŵvqs~\71[A{3يYno?_ցNV?O.}?zHS^YX&?.KHHuӚW3N}X&H$\D|4Va#/$ "e8:< x7=yD.58$DR  4VIp4a&PtDȰ]EuEȖyRRk:[0)jMw7WsaG>'dn&Ui!?#`?~[EÜTwӛOp-|I~?'+U^)u%ܟJ cJ0aG x,2  |kׂk2ԕJP]ٿ z$_xueF_8ޟy{ןNߝ_tvkK0.'.Pb6ΕM?ߖߜkY#~ d7}zkL'f8!A{ONrj߆&qp 4ك o-H9)z; ^rO]vvџ|eB*-Mn3s288GVNs ą|J^zO5!o+kB#zΓerߎﶼɞ8;Oܺj[ ӫ_O'w` 5h80_?ǿT;ukԹ Vn8Cب.y ч0_/g/?LQ糱il nގF Sr~R@y/S SؿO'Mڏ}@$LJ~&5SYs Cx|r=};>oə|>S'>duR) KƘoWybEe::,"N {.HBO , Eg_4!CF1솘ndHo \S 2 vٳ9Zc%l@MXS(rgW;`<G Wп?JVzX7+}/|e>|\mdž=Ô- %Vijnn4_7O&03g9MSɲSeGVb`jCwŅ&fNJ '_Hpb_q5#J'e0!Nb7kgƣ]ӇY eNecUT(ry˽leeᜨ9՞xm>@/{hL$w`[lJpXު`Wuֲjxz΀PyefS(̑Lh]L \>NU 杪s૳8vzw鉐+=ګ=E0CV%څՖ(I uG3~Lȍ-e!d$mn[aI#HlXš Ɯ2HpVaBiq@r%yyg JVs,.z+_{iVcvD(iPHZҚ)BǖHbPTP"D!ETV1y"CbdH#U1` 6&PT)}F"L]CKz%{S c(*J> h+8"Ǽ{>݈uFz- "Wޠ44a0c˲1\I>i9cYK<%UgnN]P8ЭJZ$U*H5Vr?GqH)e}%v _*tߛS^5+ޮjSYm:?'MzYU~WbTɦW+ۋ .=8ۖ/_X b_uyO7dTR̳GHyKl. u]̵5ӈ*'=kndcnZ6mQJx1ecA]OlOHm;vf=ҋ/v;;ߩV♨5[xl5fr{uYx"ԩ^ɵ!%kZ-ȵ١yu r_KnԉVva04DzN:ш/ޘ;6$RrgtCk,X}U^+kܶBGOrV,U z8iM՝Jt+Ҷ Oo.V;ײGЏ'!^u` TvaNi屰@AhRIZ?Y9X@gTL1 iG'mX)jnpxA4#2Ik%*ƚHkBR#pm  Sa76 %ʒW*OT!D?6B\O"csx9As;7Y:.B+ J8b,CD'N̨[. vV ]W|Jkhp5Fi6 ECb&a('Ё.PYR*v"Sua >)^>Wa ?ErLPEX •[ 9"s G| HEt (ILHGp Y H,xQWvҖ3FZW.F⏋d[Yj^Tl$DB[|[j/7^)CЕx`PaqR{s&(nhSJF(U/y9X]jkn/"<(pU̓#@R@qu%  F&VhR`Ⱥ4vcS,xܧjKn,S/Iٿ7=pKʳRR`p-amBbXc-4&k3R4DK͐p CR%[mcՂBZ/&_N),{ F2YnХnB&L5NKe\R^)>}b;Dl`k5"+BI \,5B$ ɳc,{GTn҃0 amԃ'2j_Y\J&J_Ӄ@&Zr.$'b\Y&`V!ՃgIAnZ{4iYHfQ`wazi%H^'W]kKKdiRܣzkᅢ4'\fһKWe䭥[4Oa7L&ՒLQT ;X@#(cbcH2`'RbXÒ6DQMtdKoX*Wg>Bp3 IާJ'Ľk5ՌJ*I^$K{iۖ 9}gsH)r'.[^Y~\o T[fSiGwCUl#-A[O mLSIkuzf4ݐ.уl՞G-)Cu$-.BB>dϤdO~%m-tPMyq @x6bwՓ-BJh/]6lާȽP7bL4]܄gĽ@G!Q> moLVzXXo8+C%lkYi-gom]ߗcz>9q+n[su['֢}p†tF b ʧkd-5k.í-lӍdTGĤEv(۫md7βQCh8jnړsm'M[ Sh9Yf z9-nTIQK 2 7 OHAL<#Nj(ͺ]J}ni!5*|/jYRb_(UzʨQT] Tqz_`ੁ| r^Ţ|KQKhn&1'AdcDlL ܙk %66Ј`* #jH$(gbpkdO1m8Og5F̾m'HY_U$Yf|{K0K,f`Lܝ嵞/9tgŵN#Ŝt,Mͻs z|:\\E@M-`rO)N1gN=t,Ϥ|4K)J7ȗJwZؐS 5C o=|sr &$m(ԾGQ<ڴaUk{U(1A]v Dm5bK>إoLTc6Ƿ.@;'N<]wFw,MٜD1U3wKP40MeE0JI>lyT:TsV:Wk8LDsf+̢;.Hr\ώ&c&tvu~~HFvjڌnQ%\wߗQ5p痩brDrl}x4)nσ5<Ӝ}kķI1~gkJG^\2 )HB)pk hv+v۟3T4{j*$䉋h)u綮dNޣv+vn̽[ݪ'.d*25&Dmne1}ntcڭ| OVr\ud0{fͪS>/c Ԋ?~Q|S;x3}{B'%5WY`gkcsȊLZV\OKUu#&gsy@3S b..YUV=4ANjFQGͧ>ꡀh,{Z-;x1kb" ?"p\*^T~0&!yzLCG-SDdb4"%efJJ! H#iH`{'y 2J2Aք1IZ;O;AsEP-ł Xg梕=hΆYUB,mYwz3֦V"m-SҦ4Aڑi*[Zt:Pj/oB̔8c.k}^ܾ(8`o37]zjG? /a1Uxة8ܼ/6Ol*en|_ەb1*b{K1t4j LeẲ^nn:)۫;wv۳n;njt1A n]~ Rk1fȁ4AG/&BsG B c ~Oߘe'~Y_^'t웱تDM?[w@`zgbp-.X%r x"]=k9\)]mK>)hT6Y>8e/B CC[OM(mjŪp+!IV [F4Zʖ88oK MJkYkky7G1a:P5>+&=&@zAoK-Vl߰[] =J=oz^<S}7%3Pz'7<$3Fo\rB2[b#4A-` SW.!" 
R6)4hIkrT `"J>$8dL ;e jR>u)a \t bZzu%[Uyur/!7 ܫQm}KT u}ȶ>J˪䫱n}n3ѵ͔ڞjvuԺ; $ǿ~IMšf> f%#VNjW&j~-ϪR~]gmZv:lO찧y ?9RHyss|Rva 2j;&:w׆ix*Z\dou.$\Qpe0a-8i\H$n > <N$z-p (\NrpWp CS(z\p+!]Jk ؒ@Ce%ϕjwIb7Th("'&)H Imҕߠ)^r`F^V0kw_|?x+.!ՠ4\k c\qViST9]JgtG0IyGOt&uAIK@EQ]&F$ZX-]^RFX&g!  &V%Ķ#0ۂt` jt]T7_Nn $V|~r7~~ۓp7sMoOOx73Aeվp3}{MG?Gzջ#j&#5?hNonLp7.zwDbӿ9+on״r֏wUQG8;T%p5קg>F3 Zn?Sl?3%'?$'&CrCPSlapcYPJ9:E %&Y0G}{;`M3{StmLoJMchS%\ .xtv@CڿLUA\K !q4އ=ժ L{_G{ld]$nE`BXfʷ*Qtpqy_NQ /Z}-05}ߐA٨/GF %u*J056磧V͚Vr~D % /g<%5ZQI 'r]73|>qRLG<Ǧ `>hy>34`,Z^wа3{N`%v+ATL)Jdz3V*[YMmZuz;[다+P.U:Tmu0B:%\5xjî,u^#3y] xzن3)E UZ~޺'ٛ{:,$ףf{X5>"r<ymK׽-WR)g+4T5MNKkPԔ47+MwGb^V]ZVlL1-ޔ wY!dnŇ;_|{Dހ>ْ)VX 5.T<, B_/_\L33TƆh@˯7C҂J0!Ѩ}R[lY9U(@ರHQM+305;t5V@ÆuvWQUѲe 5%]d (s!Eõ}eM>k@qF}}Ǹ6{dV=/>8 a=?Dhv92pob䵂ϣnlJhYo;x]e^;L NYL| mFxuAj5Ce!eňfU PJg௞EW@r~exkC~XXj& fޢ>L`7h|ț;B" UJ,t5aS+oZyl\F[Y f޿#5|åLt_1'skZDiRJpXolJhB¼Rg?F=I?ZqfҨo)GB WJs[VK_o!M01JJT8B!|ig>'5XwgyS.Ti ۇhY[6 y-K7SH]پ?1qw5: 'ᴱfu!ֻ7Sf7Mݢh1Tta Sr|a`vZbH3VW i:Jab5&`m5 ɝF"p!ibhxV-w%[S)+k k%EAE$Njɿ>V鷫M^yWߜśۘhޮ??\BvR]BvR]BvR]BvRl^/ `ЈK214 yP̸|tFNEJlBOV쏷\Et%'m_vRILSr]W8}sr5^ۯ 213 8* A̱ǒI`,Σ*9*9G3lNG %Rl׉: =%IEa#*kbm_u E#y g[GgY_/ RYVCa?+m?.8ݻ۬I*mI% ]APR &)0e)eVB) ᠌&!GBNڽE.H(6Ai BU„J1p A&4 rJĎtY ߴ=1?3g?-LbjL*Cn 1SEBBl^KB.i3媗ny]Nlv*ɔL9UA(*N\ $o rWS&lHuwؽZ"6oMOO]o;ĊTޟfӛq`/Gt'LLQvEO[8zz@˲JiJ`;pV4 6`AC݊x) p߱VX]z!-;̇zoi ZbRHQ14Y梍6>ƨOB4,Mb5GQM*%-RO~Sg *c#%EX$ֿKޞ?2`Q>M|&kp-A#2V3&~}sw[m">;igO;WfqJc~g -2kloOU9 l2&sCp{Pc!8`~ bK)Œb4GFp(urZ,H&`K*mAy3rx04V#V>+j=.ŮfrWCЂeL.+mEf7+= |i;s >=$&+{{ѳ^1f8W pA7"U? 13jex-6R%i10(E "tLa VJa9Tslu qǗlH > tطTBk0&j -}GvĈ ݲ'ݺo\D)z hL"Jc7|29ޣbh`d֑%v&B+5ElhWîwٜO6*-kˏ7P *UŮ9Zc>cړ^}|g_qؠ1p`IyY@fˀd-FؑVwsY_|MGqօ2dDN$GVGj wMn mhX"BCNy𞩬fN){VvO ",q|v+r[ X;S0.Р6[UhV[ iD$kˏXӄhgT Iܬ/r@rp{O/Z ۈs"5wG8a|t"*P 7#0bRh}# o<t2 w[p :#~_D7ӊMq]1 Ad[w?^@~ku5ލnmAL+͚! 2oW+e깧m6aUr8OH'NŒNj2IĤ7WĤl~Hj$lqk>l毮.WW'IIկVΘ_Rg9 C<"4!ǥc.M%!"Z b%jI<.l wtZAlYpNp%0I Pc On( Ln,:9&A- QkSXX YG5QR*hfghc,ϼ:P iQD OȪx[BМ PLq-(́o y~ vW@P BEUZ'F$aeUs+tX-%`ւq!`cc: .[!Vׁ; X4h*4^IpY,H@x<6Y bxǕnJ=%\QB#K[иuuV[M XyL*7ut̜?CYg}>58}f{TWuJH [g/:]޶ )r743YXyaGaplvtxĊ<|PBz.4OcUV>~4ՆIwZS4XOυm:%CN}^;r$' 9R75ء੮ks`~~.Gz2jQ*`/wTߵ^z(8Gu{D9zV~BZc" XSu8zLI% #(P lE2m?8)mNdk[A1}qɁmω?׈s낆 w`b֤ V)o.&L_*[jA˓˛?-bke.krJ1ސ@~w@ zl5 dR{v J|}e#A;D?Se +x0f G'RO@X]LM1w)1gTb(i÷b(m!)a|0  :4'bH85z7Ul/NA%RN&M{ m,eN)Rϓm!AQ.9R.]^{("P$F+,Yr14{ 4XJYceBKu⦆~C{pVdsЊ8`1E)SLUaY8YL=8Sp)Q@xћ#OHw:;tE+g=2Mk' SA0|HioA9ӌ?K%R,g&\ٵ-Wk;z)SmyvŔt,-E" F&V:"% ,X &vВhSBcp`)5 Fxؕ=qؕj!ệ]Bv?<5SnxLj!Wpl4yu$4x,dW\z?_<|$/.Gz2:_L(w-Z#8noH%Q%yYWR))x#IPM)`7+eWy *(/;Hq5(0{v[SSe=0uX'&rCÐ6|zm9 gv&C59 lk!-|10`x{u0,Gz2r9J&>xcii`޹58t85;"V…N+H&`KUT>! 
ximۘ^^z/Blz!4Iܑ6hѦ`W46iG#Zktb^+naV҉$E-VmMQFDń KX6zR1 Z z:Vk1) `[ZA oCIpb/ƧCu79;)}n[,hl{]l6h#et"JID)Q7DϸuԖ0u7 JDH6w>n\O1ޔWiw퀦  _%$1jqS"m)F´HW!C_c!?g%>g-w|HsJz2+W'T?Ы/7%l8_ bb   C iDUPޝaCQk/Ϯ+?-JMԚik;\Z|YŅ:bh+ ?b^ƃqyC~+b)o+34RS".[~Fıw,XmUn"(R7n]/;Ljz`U˝KRe1ٻFn$+_n˼_ ) l6Ėdb˖[dݺ 0#Eϩ*d#bMK :-w6^lj/(h&WcG-H\,2GEF)8O"v9M-0#$]:}FO.uJ܍q+Fvz Vеg9ɆQO U;ciz׃Pd O>>}?x14S:`7cwɷ.SLd|ӈ:ZWػe!HV1\|?.JfQ|dU t?}z,"(l=?y*A5RQ ˤ &c,PtJY$vܖ8xddl΅˝F$?^5)F72FQs 6^i%kD8x&AK0 _gn0>9ɨ :cL)21RZX{ZVR_6zh-?>qYZ_E+8xڐ{DE8(~ҲT=lB0#H󮢈GVi8tVJ?i=Zلo$N@n`!$J*bk$+ rH Za,Kr+m\Md 3j bNV>8hdSF+a#Za!`@K6Q՘YA?&Ό"( +u814{͍~`#güSxeƊҗ; 2'ʖ@}?ҫl#TUTBTUqs(U;]&.^mxL]ͳ/gL^@e;9!M*~7^>cMZ%?aG$WK0^!AL.4}RZRéhR F9VHt^ڪZIgNAFY܂^6Wѭsd ۦ0nX+Lmu֪WN87/ GKLj^n7.eMTH"*(oWgP(EiN%@,!"e07ZJ DjAPM?sgPCw\pHa1Bc19-uR[ $;-Z\eYOiE[O5ܧ؄qX- ǻ+׋vv@'eT5.M7o<^جA ^GHtog4dTu|6թkSm[ÕJw:gۇ&0o#/ɼ?O f0) +2.><^Ա/LҾWM4bv6bzV ~t+j ><;rdS|r9 33)L}n6q-Z'NnfP;a& q2X3FE尢 9lY/xZ/R}.M.>!zOLIK\Il!L3C>ތ K2+[,r30%i_6IὮRD/=.᯻"[:u%b:H:&Z,L+;˃bꏏ ,\SITǑ׆Eoڀ5v 6fyL0gUΆDv\,#1ٷ" ^͛[zM,ou#5@ؾ٥ طo %-g>Nk1|WLHZ^ Y?)]nUU4VbCr_נi~T"DGk4.4bHc%1x` CMo4d(?irZ0Ecٶr}wYEz7un[Zc/wKh_U_:s{RTuMd =:>tct5^/ CŚTjBWWb,oo x% QzLa"^)ʧ,`X >?]WG/xuzu;2@8AIBSAA0r\q!;,Ag^[{P>]ǣⵛkY/bހeW:4({׸EY"~TK5b7G)1vt>D(-=2Oe.Du'D5o"H\18EXffL)r\\&0K$bRX>B*AIeb<_>eYR4\)v/jAOS:@o0`J4MP R-"/e#,<ÝGvsڔxc[r,(f^z}Lb mXv ~ <)I]RkdXyyJib$Tq*=%R9 /u' !K"Mkr(aO.gBrAs]a$pgRfJ{9 LUC<'M0cqt^ p9"Bl=_'} H%ATrL3p)k?w'R s tD#fYs乁+h*?ʃ +u 9hK=LFQ ?rK8ˁ?#Q#nO܌n} -LD/ݠ*-Ak"- 5xs2=Dx%a)qh?:eq+6P =>CVDW}~2 D:)OjYU"Ƙ4?۴Sa+6 \?Jȣ7q5LUJe02&ϗ yd(P?Lω2tOwj=xYᩅsVEO*L/^oO-EHV'E? xRp؀ U.~vjgZχ>ƌ~HntF9Xx L偦X1??3{w\A5&j~1̄*.L Kt񡟧>H0 [b3orS5{~'/\0vۛVMg(]+'A%=\eU}.?q֒M$~gt;o$=[GdS\n8\uj0|7mہ'Ņ3fnJ:[]o-֬k6Wk$M_e$Z_)CmL!+)[ \>蜡.TJdil%SrŢlxrZ1%2?GU,fɣ4Yao8ryߛ~k#0/:EDƈt%8d]"ătbwo1}^V{Kķd ۞@%/ * ZLTwt}<ַ1cKyeJsp;{sYz@  n3z%O,jEMx850F[r<,I(*j6xG )c0MyCSud;x2骫q`͸F ,$%! Y%qUG[ yn4'LFOd[\R.J4pI:SރuZ _J|! (hV@R! ]6.X1( 17kc؝)_^nr+4(ڞ.rzz-bQ I~%G;g_'n|sq۽6q7x?]MCh ⍦S&t0-B{:;$}`%e>Y Da^wJm49yd sKO\z+[$䅋h#2eT Jx㤋u.h!UGM6[ݩfr5o9^;;Dߔ2ґPۅ(}Hȣú &B@$)jA xJ.Ґ EUa ]`K~҆HU.`~ުA%F-=gAnA4L 2y߿O&SM7 KC-MYxkNYMr})oKE%K늚23C[3I˻-nUbe>7Xp57zhޫw?GNËUN: DS?/ڛAPl؍ VvOŊq^fHh֌/I\*$YLj'{JRweϾgQeispXBZ<jyBTSO>9~KV@pXH8„ЌR᧙IwbSs SS~8jOz׍ }%pY-N_z҉IJDsq˲;*')]NwA YVAkB\tA e"sVd?N'~.^dM\*з}ꆵs(Aav FrYDwJP߄R%yǔf:L 5$ % r20d|_llJU4:fQYȄ+wmEJl^] Q)s΀YXftNr\BLa3C]mр֚pC_!xͻ_Rmk*ʩrEÕ  h;D_R=$e3DzMQ{}_]CZ2unmQzU򱲾BV) Ѵ##DW)gq?o<Mbɋ?Hr @YWPc]D-. uѕm<6Eg`Q&/$AQ0::,=xf?^W2<&! M˨,L7D 3moh"[x#8)ewˈ#wٗ~R73Hh{Pj'9ރX();諢QhkJ$S{Ο[!p@ȄR*H|Ƹg(l>d<QQ:CWJG1;!(O1c2j]`-Upeg}/;Fҧu}76K`',t6j%pȨCޖ5_O4YTF8ՇźYя܁uvte==m8>f|ĀGMQOڏ;]ҬGwh[tu45٠y_G3[ rv?nw`zFXMq}~ԂRI; $U\s}T2ԁ7\(&>ۯY䓝\ŭOR:#,dc듯?>B 7`٭{Ŝ9TT2FC,xӞ)PLHzEhYt N6#[QM5\{"P( ;x|ij;i;|ˣh"ߝj rOLM̀rȓO'#?s y il}zDe2|󶏋^IRS+lA_"z0}lX0Of?myB=OQ) MGN=ZPΔ% %{KeΈ4: eFK\/5M߿|ޤnE{MzMot[9tENf?gjOߡo|ŗKRV: QʔG3Nm>fzcj+RKkvKVZ:4>TF({|Qsr!Qk"a,ewRqGfy%RFͤwj6|?6Mzd 2`T2`+K7|u؅wpڳ?M넝v۟}שi) nPUiYV'Ej y#ԌjM᫳F6!yf5R;{6wy[ʚ5hM!O v>4-y+~'hp6iv3= v|їч`/g>w[rGą#4gxH܋BHY0TcܭI. 
0.IZ:)fw}!XA0ڈ%ȅ B ( HE9H$eN:I "(-q\zrh}Ԍs\ l< Fu^j>}q Z"3$+rT#:x0`!鶁@XŜR @yydH 9,";V(T ?}logy5`2(4}#|6g s6 OESDazs9;'Jo&̮ofglxۧY 0 nKĽA* IGpɃztmkg^ah{hzef;r$YtԜ旉 =nspǫoE&>f|)htw^^)ۻ6 ] ˅a uya5.F3cˀ3cYD0JCV6#Ђj\k9^QiCer  x"Zbh+vF Ң&bl{o!i#B} ukp'2m xnEj`-?_r< /97ᴇ3ny^(W"99Q9TIƤgsGR³<5[ |Fak](&Dsc~N29?jFE:M'QDe][RDV6Sτy%~wf(G0qkR߇WH)DeWuz=,KOcHO"fNF' B@yZ&ϴd>CXj5<*ꛀ.i`|qsEo]J9PT>@KfSm<~JH Dm+yKxUHi@8&"yuBhqlB!w/@ QD iHmLSM'*DP77ۈQTK4U9C+ŷV~Vj4.(.`XAsT:x$9Nen֏DzƟ~ΩI6z <^qOoCd|^l˿53_B*iM?Q]zаr0R!L\!;y5W_IS{4cu[z>n^E͖Uo_zQu N2﯃z-y8ǻ_ zVDƠb8zѾ ^;Gu9@K{1i9Ѷ|`ȥΦ?ⱢV&c/:t{ǣhMxRp;;op4~>z'uG̨$8LK.SSުK8dM8䀢cq򃱜;θpn%3NTBJ>8o}G\mrMH qEbvBMj*jF#hU5NjiP!!RiDJO(bA[!*YT+6bҰc)yMx *4"ZKUsSlgjV*#?BŇBX!RHHtBQ&0 1(Nv(Ig8ZE(mDgPyoT$@8;:v~9j)-|r:FH٧;UP}Uo:es]yvڻKY~,"=dM:nPSN扼?O{~h/{3wݻm?(*4z *,gM@V%(1!b]tY3m#!"̢g9QܻS9c;fg^H96Mseo@ /焺!]0Ec${&/P:@'ۂLE7_Lo{Sdd`´8g Rcss͓aF*0$Uy~{g'{-ւRrc,R)m iӿ;Ʉ EFtq] ֐+3!B $ S%/p?Lj)d^H9gYHI];2^dqYSf(Hr'QKwnʒߑ 9/׹B]`p(X1x NXCt"N@M3#i6 m G=֤ i8ĬAYc= @@`GD82Ud4c-LtLAs%XJRJu0LgOZ `%ѿ&W1>@ ٵ@0ztA `5+kau굛 ک Q.PT['!1 ~g`Pq>GH_4J*FPqIi|-_҄ChI v>g˦w,v?f0S]S o}-\X/W.mxiG1bȕНգeo+6oaUm\ӈO5ЖDLq_&nw-֋Hj$[ٻw- 08}>fE9ٻ.Gߑg H/1bC+q9ud\ۭÐ2e,*6Mo+l`N?CtK=jVsz:s1ÄU3K~&=h3b%,ɥoʙ`q5Mmk$ :*%<.6wnq NC9'{zA+x_܆a`>QCNe-o2g~3`^Sx/]Uuw5Ҫ;jާ[SUg.n;ŕ {x7\ r1ce`]P1fn:ÛgBY vz{:`s^tST7ҖHpK :2}ZMv!LV˱1QE=~޻M!NzhI ~!w֋t*՜Ͼ+"<#Ә1"K{` }P`Y=,Dc ÅNAoU;E6Rw xv?NZNNPy`>5cEJ.,&v!w8Į੬6\_~4T- ]HQp|HߺY9S)qY"!0h. aH*.JG18EQU,2WFFK& #`PIVV8eFZeZɖmK+?쥯Avr+Jh~ŭD /nր(ו`.NZB5D^,LkJ,)#ʪ *BYyvy~1S\tnj4?Ś;D` ǓLjĤd%ZAo9j@9-xu6#3䚴ܻDDH-19xr=6vʶ9 7BJtC$;w}ؗIJZ1u?̵m|a NcN2G p QIwunS }r9ŀՅ\]cC*m{TLsU@pR=FR;N}_428S!8EE;](~**"I{(uTgک!ԨDW +mX1ډ vheZ7O=4Q^:zDZZ[.E)B=RF%B oz|5k<.WBx'v,JMjRvQh=QHV F/[p8J7AGQIw[B]P*ߢ2QVꂣG3詄LHJV1B6h?445ˣ.ݼxOJ[Kgbqw3[V7` vldq &jBw#֘+%X#YDU_+e3>7ݖW y52\?*ԪB{eCd^mBk)%l\Gr>]֒IY9ooR(8][q}"ŗ0^an1/d5|պn_0TLQl1SHAE^XLU m*d!-ɺ }QW^]/>~^T@ǯS9)uТWu-sG x.g?_r 3Ʈ7i!{mk¼sGǷ|]+ޱiߴ,S<]f4cY]u9tsNVu η_R[ig(wJZ V_Ӓ`wk{69-}d=yzޔ0fѕ~=|ރuֽ4XeQ=W*(>oz_ *ѩQ}R -ejݳ9( ATjz0"9YKhbXW-ghU9!"\TEOψ=(וFڕTNbt9K C 6īwp0*9Y}q:_}臷7[=ė4Z>"Zz3]#VZ2?+12EsWb*yLŤWp?n/ LJAlESU].R*P`5X3Մ?TrcL5MI=rK>zKB͸RLZ~$y.Y!(D}fVS?[b'S(Gh1L* 4- nQ2y^h9ger[xcav0e\\{weVY$h`jvVhy%g҄8)D3y bSs` Pi@S:M!P,G DfBjZ)?;" Y0383f(}X\h13+PZR[,`?q[u g~}(~3eac'|\p{b>|&X>CyKQl}-Qj|N`g}|걎^ib49Gw$YaIW`#%J{/W|v9ss"|/+zϱz?OBJ~ˏxKfK!D/0+$1, lssCTcN<$1xnʨʾ@+3P6 +C=kST%Qg'H R3o+H AʘԘqĩpͬ*@3:R:5J*q&&P-i{ \QC\ay@ݱ8FL}.Bg8J8r1BαYr%eZp3?2i>v6\S9],>i"-#"p%yA*hv| 2j K&5ޜ`QS/#N)^ @& u!XZ:eEwΒ=ؽ[D_h2ݗiԂPA+=Z ArfXrڻ@Y"Qن F<¢)#}QLP=TDF_\H_0hYH aX ySӴh+*mO K*%O Kb +M0aVJΈ'`TX)8L:.јc!]Y Z̳(RB(L>ǢrZE<1Esڽg*YeJ捸s˕ t <lҮnꯚ܂W[ޔe^{Wr9X'p.yd)a&wۋv+Y~{t &rTR7_^}%(\OV~{Eɘp /}BS'^dzWL S*I[x %oϗT's=*C42V+_Esyk'|Ml4sчjtt'mٻ&7ncWXzIJfw4T'KNNΑˊص0+ڻJI`; \I%r| 4FY#i3! 
<ӵ%Ϝ{3feX.V;nVn pC0$*ud.v=SRi,W^),|2`/8klP X࠱][QuE"JBj1AF}K8AIdF ~jqpG ΈW;I1V:qRL cǷH\a:sKhL|/ICz#9vyRAǪtQ" 2~e1VU=󨇫o3S({'34T]ǘ5"Ht`мtnIzu࠘ҾHB~+g{лhy+ot΢uՍf*]hEF0yY?\WGCaƤV[GJuTc>.Eԧ[8RɄz 8:<8JȄ :Do>D\cLMN^ḕlu pV?iͨIPs'X0(#((0HY2',vĵdN\nl[Sfeq6xIWXl.4~FN p$wg׹D"3ĈRERn}۔Lb3b6$- iʾ20bQ_@]_1ܞ ,!IM)+דW"ԩDT%l GEDRf<$d6IoYZhQ0p[cv׋%T!.eIOm$#kӴWE~@}>OOV&+hTbBd\?x9%Ut0w;S8?/lҖUFұ{6VqD$HLB5*S(۱eK4DG]Q?> P1WRK'>A}sٴ窡޶B*nDp (fz \ .7Ƈ?05u(%.J }^#XwJWۻq?-&vzaz[bvg|K^dllKl{uu۵ h%-*E9VEҪ0blD$,UdOƙ`0.ECia5|3bUdM%2~F>7WF)NXkG))Q-/*@pR܇7ԧߝGl%9DPL^2O jE/ѿ߅~h"zGF7!iVބYyfM7Yjyb6!D4T#")<' K3 [#9LCkדw~Tg5>Iӎ^O>:o޺ِk2i˼+ue8Ɲ=l#eXŰ]gÊ;ذ> !VSE1㭣Ud`:ɉ2BALYf)!CdX yVdN !k/ >.>xFer2]xdrOL.XK4E9KH)-kAS`wsޠF9.a( CtAl4zH)=^f4C( :Q-U죩OYr⬲"m =b cV1r?S/VXs8iJC^A~R8M%)ϼc22pmjv,c {F7PBk-O!3><:)7#,PH߈!~R\g*7Ŝi2L R?l Sz yk=~ )3f/,6VOOcל1}fȻO߿*=țU "Dg߾{r'^sWBo+~|A]yEC?b<.%}t8j2 xsTVTc.U0N1ku4)M%0I Q+AzJ ɬJ%G}-@<<Ζ^y/6 4`&Ax* Mo7|]/'׍-W:?otTg{  7K)"֢Es) v/Zd ED1+~K.UEKp(74(ۘ ž=q25Bg\giv?xn[HFBwq5>L%BupcDn-ƈ-3lG0FȅO8F%UڌF Lրo_f[cKPt3frogdX]M2mvjtV|T4)~{O6QʺVO&R_]اgTh@Im0Tru>=ж,ގG{i@ertL*\˕z{we=uܳ!Ѹ&٦Ggm=:$ےoR1Z޵%)(T8]Ue" ]lnp!Loq?G?7Cp dv =x[0&wv۷W-L;ńI;ם@ecc Ź(-M2kB4Ot,-=C<h#sz:{f5wr:Tm'քJTm_K U~.RmFH;0)Շs@jʤ}H’Vݗke|?ޛXq{>mɤK&xS w?KNrU=n톐n}Fۼ|O׋vv Pon J5WH3eQF#In{ ]" `4/E >\q:2QOUjF {X>q}zL&o6!!'iIGNgK0wP%.}ԡ_ttpѧ|+ȱ\ݳ+S-@^UO3¦u7/6"w3|UXek-LR0M($/:4n~셏r8nD(ד\g3ɊG䅫*(p:<o5S칬ͭ/,.4B!hLj Gױn JQ؀֭)1Ciu[:lQҺ5Mnmo|6SnRLuř@0-Un:%5Q]ք˚LBQ}.nؔP:%8$6T,(Z1֭[ +}XQG3鑥m n{>(U?<G[X}HSEF\^&l4u N0+)EVP˻eK^B]uEyR7V-lBҀaz?׀.<:LI9EV]%1ap|4| zxA YMxm|/QZI4(WY9N m9] _ Sݟ띲aWNcrlpd3ix89!I0vfqQ]& 5ʦ[:ja ff )IJ{2А4Xh- h2Ҋbf8Q fRyP'|R<Kd|RۿWFߢdW~ ORB&6ŌPjC^mm'(BZޝ*AMRJ"Bj<>xX aSb5]^ݝ!܏çX=8 k{!o^]wzuxӨt׫׉"T?ܱ1kR ں;M3$Em2$Ώe -bR-cY-J"VfS)g!J4d@ H(wFeq N2CB#ѢޜPQ:>}p1 Ie 0lfR3Sj`9*w%`-V-'e2*v2H!XnBx7N[$`(-ew4藁`9TDPWޑ(zy̏N-?~q-Xb_^_ { _W,RC8eՑa1>ɟ=3 # >J"I5wd|(Ur522=sz:Zhg)!f`㵮S$ uj`ZPҮ:>bHqC%IKukj6*ׂKknSbj6?#' *(٠fy`rp> G{]`ݧ^oBqya _ڝ.1l6-vu쉵@;n0 M>:jXt iL֭9Zx1 :fhnFG *Q3?$¸QC,y.٨eKDMj榶*1p: a+0Y;+mfwO/TbSSNL7#~Y:{|넾 hg[4( ʢAYyϳ|dq@>T*D]fJهW.4Br_fD]ǘ!ynEwo\:I_a28\;ȗ 6ֶIN2 ߏdj{QY|bH:i O~'>D_;Gm DG[](ڲ%:bRIS1M @rH {<'⼶흷2L!c:(]rp3P'C!`/;<&BǤ%o% wq%& y`RD3usTML5Ra~i`e€0Ί;BALR+r!C8 rB%j!${`YIVRȱ]ؘ0E; ǁ4ӄ'ͥfSH˓R.O9?VO{y|v0zR߅~ KY ʬT)o7~/(wv??ퟟ[~{*Z"$"Gal5>gVD(yatK?-B9;+-^;t9D'KwrCLyBҁ~E(oߥuwl> a"<>4?7Ofy^ez[-U*yclLL㶈\cƱ<2[\0DZn1oY0/VXTfw, \fͤ4<6RF4p3PʄFi=͎LZ,7c؏AH.A*4ίQ@)r@yXLIe@`#G{Lrbf}ݫNW.?\@|t[}? NrJ(Zu7q*gdlfΕ. N*!%"HBK2IVbo_/cQ,^K-so {ߠX=pV{?j;` .T[)-k,"d7 UJ{*ׂ@гAޞ@@Zi$,*=(N]n7ҖUCZCũSbPNr ,R8J_@"7f9xD!0ٻSCjIfͤ{:\ϭ ԯg w|^:zJ_[Ee_]dS^>dUXh >t үn;~ُm'ACPEڊĖ!)7FESVL1>:7 6 9r6 p41Mj=Mjᨇc pHTm:[VD̽6}'fK ceYR VwΤ[ܦ?RbmRNаjwgZ_D[똠/FߡM ŃRM/˦C @B\K^rfyI ScG'"B#rp( R'N_ baց\fV |Ƨ]9aSvVqQ!|.^%ބO]*?`'nu*p54!-C,̞ ?7_#]߽e߭w?mc-ybfMNAm]J|a6k# Y^xzvxY]uxM vYrݘHC޸)Jcnm1QmԱn+󜖃rnlDZUN 2-=&p&[[ bTmu%<,][a-qmSL ~=ox*[[ bTmu9H/ںiRhW:EQ9 6|_kU>s-{ea~luYLMIᝡɋUG, xbNUFLveⴴNPUΪrZJ+Ҭ`qOS0rҥ640"AC3P?*Es/H*%cIµbWw}*n~ݫ8XO%kKjZ_;3.G2 z+@a.AgM+6^.ԧ2JF]t/Ʋ?:g)|M+GRTf+,CM\b|  <Ύ*,yjTGld@H(q^;(1o/ ew|!'o}(;<h+KHQ5ʆ&+TdimV jۑHq5aR*FTA0ɑ2WEQnXlL!Ėb YN\‚V;Q(W*-%GfmW(c@xgu8?̓3f^~p=w7m~8ZӻMۻB#;O /(U8D[ 0YCC>!I)2LpJؐ-!SȪ]!0%fQq@F!pE,;0Owϟ_1Ω\^>O,K}Jdde&n:_,7񷧻xrEۓ~:"+M#JII|M)c,dO]c瑠 9! 
)ɯzigtT.A5%[yTF,wά,cIgQ1FVO mvrp`b7\##P9fD+++/B1( )~M*?R|mgSv191QP]"!S& (A4 Yow2YE[1(m-=_a fj ("Eڥ|ϧnv bïz^ˇ$d@B# ÁAbTfv~kkEMz I&'WgpDNgR۫C$&=$L?F[뉙H6wأʮb**[Q װ[?ʒD!10YfA䮃PEcÖj(ϥgtu )H>sB!yA^7u;d>S~(aTy1`HZ ZKMT^\J $IlՌ)}V2QP^k !|/ci޵\bYj Үgr2S;`*Wf;2dn R<_gX& *rۃpq~Bv4NN|tF<pA7aQE\_pgm2TR rMKK2?LqBjG1:[:i헎FSΗRr㗡QUH߹S1^ ,aԞ+)Ӯ2,m6JNI iյZҊ|3b[AgLH-yC1;4Q9 sBR=%]},aF:<::'I`$c׆:VG_#Ĩ:8i4e+!QW+\^X >;OEb5a֓O2tKr6NV?e6A-*uq$4)$pySs.pTΔ֍G8NuYIJYqPC|9ۘF&z Qp2(EF,2PPa1YEvMFMR%lGYc6ƈ .2e#q,3J<ܶR өԀK; ⱵKB8ޡs:vM!x$p˱4R*Ox_GYO~*I58cfcd([rd76MqxAa <$˘d:S)i "pJ4qt'F)3]/F9q)1^iݧee qWgF\G7m\(= hfs <ՑT"?jy3o'#*4z@< `4|8XN@8m7!L{ATt:Or!pG\mL]`~Dx ab nQ1#Z\ib)S6HmCq5kokaDQ"ps\UC> $@K Ɨ + g`vDwEtxc~|sW Mb֛D| ?BhwЁؗ_.y՟.M w݄p7p7)uqG ӆ[RhB2wmmK| NH"@/k;Aga#NN^=}( ߷zHKÛùp(plGvW]骢R:38RKgQLqH{|?G_{&׵[ܫ:yc<8a|>ofٵ/Ly4\ Mjs>Fr!4f<"MXPYKr&Dv譜`4 qU%rPO;Xđ!„rv)*SCq7IRb9 vPAψRubQY&Dr$䷅:::֛ Uh`ź+6~nsI4;C?p 8/{)=8đ-sS#OJs/(!]*N;Qڙ /xqce_-7vy∭]z60Om0x#tպ:Mb~96avVAy/ .ۯvtqsoZzfg0, W}cD_)+Nr>= j1 yI(IxFR\0? ALnx(- N M>˸O7Ci|׻=_K^`N&1.ԩ]?ށ|qOK}4BrKD:%S*OrJ.d `FCf/PĔQ,nҔ)"am=V8֌!']aT eV%^LIF+AJ0BCv֗KHyX+ ŀz j(!Rqg.(Qc5*G F ԈӜi'48S@(JńgT`U 93&YA,T>Vf4TAPc]( x\J2cۿ.gf>'ЀQ1.մzu9eQߑwo~/<0͇xDŽ~zAD ;/?h2-f4çx!t.~Lgߏƣuwx0M+Qݻ'fR %QbduĠ 7 _zDQ848 kA?GNaGl<)E~ ~'b$H( bO2.6>y\k1ኀs4{s 엽dp>Y=[*I},oʺ-!شh)vs@1R/m7Y !QvrghZGhnh*6~\iSg/qrD6{(%+:nI}=9~]Sq +jwRxd\I9ޑ;eN$|:XWyiN;xRrlݫgPS9np Kh? 7B3&TW5Ww/3 $P&:R$\X@*ĻG8s6qsbJ崇~WI@j4w@?ܿgov ~ʓ)✷_i#BBCmՔnLFp>$jgvʋPRVA1`a*IƳ .Faw:q,-@I$Z\ph*A)}/^(zX{2xJU>.bպl!9oh|O XZޣ&^^aB #5ʐcn$,T L)4E{R!򩨌_S$`^z|YRyuqf\0-_D Y1XX!@@8D,s:$Tcl̫J\BoE j E2m}яV3m=~M;g7W~Ǟ\"^3t~ jc9i׿DoME *[ǯdž =P8U4<+6!` ,UBwӘR2,(R[O$n=1ӉѶA2e@8Z11> Bl05yS &̒pȼUNQ IAx(rEP)YV 8aAJkjnX6*{- t$aeû`Y:j+JϗuI!ъamz(DzDThA 2z"-p]A -R8lx *bu<Mcr @ʮq)A u[TOb Wa}2IV!9o̅}Tu":Z@Z_$P?V!:T!)P jc--ED`ǘЖ(Y_%%`voF҉xWI8BekŞo k)(%Dʱ8>ꃒhrnQ5Q0$JQSU;wkT |7`NQ44o#3G֛L.o\=~D~_)z31 x9~܅hڛ.&xDil9g)\_Sm= H@jQ@3ځ?bo1e0/x9)F4lfH-C`Q :72'P.f_@ gv-kڲ*6wlӽ`MrB@} 8io+xTb<ĉz3r^f;z3}[ Rn7#ЄKa|=Me!\ s4iH|!IZbENsΓ-*94}wɷXJr$k0)ti~)Qn&Vuӌʸ6ӤE+rNFoh+/Bq)~HRu"IVA%'b$8Uo? xQ L5A"gַ$8ѣxsPl%$JK[3XS,_.W@|'s6|壟t:З`Bڴ*FVZ'IR:Z$as^;v[gf2jj[)$t9 A-縐*EL<+NϊKs sţ4}…" UiZRQe͗ޙf# B)}W`lɥcjojIp5VIL(5PƫBwo%zgLG`jв%DRpڿa|~ ЖytOk0CfRꃕwz_W PPo c}X̋h' /9,NNo`;> әpjf?;eyx^x.FSn]f[ΤQ%EIo3BwV ?F<ޯq=sї޼sr]۪>楆{ 05ofٵ_-;r'{# LP~`!hmXe[60$:IiAɐ3$38E5uY 2byEw9 F2˳l)M.̶eRHWuǭSC[sm;/."ongw7-ŃzdE{POfrv3SE9sh_2!ѨAV־֏nnG*d M DSR5R je363)D, epLͼǖ0C6&B-Ș13rA"+H(b(8EU`V+% 35d9;<̃I!#Qh3! !iy!,@ :Mȉ߂jA  ̃,m[[+{/Ci$]xlײ' Cɋ)ke"c˔xŘj(JBabhFeî Ufa4 IwI -0fAXy#L 4GVRkfa-*ZrT5+} dM.^]4d _d>eO'.`gBuEgzuH+ڧ?/ )T"u59 bn)o/El7O+H)rܐ CPޣ%Eu M{!CKB0?Pbr*=O |N;zʎ33ϩmY3[}h %!xMM_z鰁/A +ߝUoLR`6t{k4% ,`ÂX&D B*f,ea)aNݚ+p]4  ?HKy}wqoTaWq V@ߝ1(sryZ1};"ɸq<1A|Q(_{(~h{#'=rAS_pU-#ET{l$4e-=uK9 ]'Kt>]{2"\D_"к>Nl6=( =}xmK?tڈ)illtV mN[D)>J6bO^7^@ՐHsG^}ūH3B{=j I1uőjEHWCGUWDPH[)2m]ֺhR\EVY zk+D#[8&[J7>QB2'} NSY'E4$8Um:kmGM۷r8o_ƵW JUq6z8UYb{yt(^g5zϴ?^+.XTNNnkTq}0a57lX-g[!mIDNyQU]}mzLǛMg~t)\;>NL:O.>f<efe'Ǚ;Fy)5h`PpC) L'jud4r|:߻JB'u.pf%p FYL7#?~v]~E-|q6* w]>MtMjLc _W-P=w{j lVp>|6oOڼS_Ia<.q<~&+Q|~>赍͂s7.>IOڛt$-.|;kMPP]D'n68/\srx0Loqz7LJǯ~N/8+*R=b'4!{VO o֟ ~]{q;_d(^x=&ll} ǟ/~}bqۯ_]ۅ:K)d%|%WwVQxQ <𙧦s ֊IOހI> f^ g\x'`pzzᳫ'ND{@) t"<^^v̮]i=$x:eY~}q n rsaWpVPk`nqSx$ J]-K!fWtVFW'-GEaR\AQ)]O7 u\Szpe%/`{5{u{7-jiO^x Ŀ\.\- h[x>ؚOf}No8𑽽s3yÁo&y ĺ8OO2<>? &A8f \կ[p8O{`4yEYW=ʾ<g9܅S2x8X,K3}|dlRoFCwǓx ^ K f`NM'uЗ}Ƞv[tl]iLO^' "Glx('$ ͊jϋT1+a^,Fp! 
V -\zE/`v\: 1 -S&)BH+&-1G4*6ENL1r,OxXK+~"m֘a.FK_6Ԃs Z´*L< K`9M6|i]H>ۑI'< Ca'ĈclPde0"yXLnK)en%N{&x0+կo,$ݤZih^BJфʽ>M #9 f+ΡG]՚jSjс4Uo9M؎nv҃駼.(g{tVEޖ4.aڢUa1ęURa.[3ʮ%>J* ٰq5Eu5SώQT^]RI%s3=Ĵ@<3{7;)xqBU ~"d+vm)?.v&BjRraLY8pO6ݙO:]ɾ(q) w.V+ MwK&?)X a;ky<_&r'ˀqq}I_  ƮvCl pvGNn8]tu~8->Ӹˬ"A$b86T 8 \q,F! bBcBɐD0!ړ1~ї~F;e+wqyau$ͧ_nTsٙJS"AT򓻭)HMDR\?jW)<ܣ E޽ƌ2TO nkIׁ +v0ĩScK̠`,7Z`#JFeĖHDHLY2Ҍ  h?fOƑ}N@ҙ-dƞt ͖1YihBlCӍ`ՙ A@,|< 0)"n5JhDQyQ 9A8F PFa@gZ! Dug4uJ!ʑ-[Mܱ>Tʭ>=>l-,onӛAf11'7T} ?ֻ|ʧ\*r<=y}3oxԉt<.~yBlP6lT,!QX>@Ê^E09ge(M-!(k:+`iXbBƳ\<Ƚ\gŨ(u $)m95=]c:=]!mu.mUhAՒh ڼ|6e}6_Q>7(e(YӨ tߍY J۬EEcǛ(_Ox@\>_bb*|hتib@3ʥ7h.Ej0a׷ӑtw ܇`9#[p}ٹmFkԒ}{/Hwۈ)!!s=x׃rceySwZFmxiyR+ < g}GR+8UE Z+&Tluj!GUտt~1 $7u]}ٜjSP%[CVu/Ӹߥ|ӫW+-[jN-x-*]*tWPUZoTFV76ǵsFĆt4u\+W%P&om?_Zy۱^+FH*ģ0@Rd -KT lƸPmx`с+Z2b h7TxU0u\5c'42w#w?u PTu%)A@`jGDMLUlBk5ƚ"c P%|0E@"RSFj[Wq4ɓvv^hp?~ ^Awiٶt\V`DF^vT4jˮt-U.65QRFQi^6pOeBf<9 wIcZӺ9XӅ&R^GƽNXLf.Um2Dca]N >.AԽ% aB3f;֛0uumM7vm 7J:c;S^vȪ'aQM^g{=H ꩌ'NO7cFݭ Qk2| r1m[rh޿.m-9{%JL0ޡƔlB+RV|kkk:F#)xph#qd iuF),#s7yOԮp"2GVmb_C[V;oত╿?X.!028j@bBHYi$ RʰI"<$$3T!U"GX38檢z;eFu'YesOXFk;Rޱ]H^>yO!9fWSR䑍o`y߉ 8&&F]$2F]QwpvImԱtaw˹>u_\i,dsJ9|6 [mضR]{ Es^ ء 7ԨO/~54 鷘M{yLg]< C8[&/ݶ7;_ K\b&ja=PYRtIt =l׀u}">0Zp]+dHd-km+9K6/7~5A Nvc&dx&[MґDR}x.@4_+yP=PK&Xiǧxiǧ&2A;>55at|_=u|j9 ԘVvAqvAأ}}aj0kubmȅOrJ&Qin؊; .G`k6a^|g ` [PcϮ" 6DbO)vjw0Y/?U-}Òw-rhEu-zۜdV"n=F2 f$PÙu=*5&8p6\D]4kIeӬ\w/m8iخUO1@j);c̭}u/ҝg拭}~B4!iMoBpMhky?h GrޏhM5]W-#cBme J6DBcޒ.rPl/VvUرFƂ9װi! lߊ*֝F+^h) 'W|0r/gEX3>1M_3{}d@؃f֖F ~Q)~ृ\!憨"ƨq"?'!G2@P@B`C+!(k>ʏ;kA7gY׫4c#)~R5Oqui_ :JNI}%fFP#Z]v8(ʞ O(V /"{wy[mU-˟3޽q,[,]||{-7h!MN-znfdYѦ`sJPP-[Ri9.n!EWmq zo,R"2ɧs43ELȕWmM6 lZ_`w#uUYnT0GJ}Rʼ'1)ȹ^e,OH3jY9<1i;òLp[1.gC!b9d1&:[|Bɷ^x! +(ؒm{|{R.tc@zEh< B"R2EJ3ɭ K ^$BL&bS L vmON ]φnt WB}XƀƘ >$*pTWTi^e>,w%@K qqӼ<=!G 1zw^W J&_^Ed-f=Q@2;73@2I&p6Pl>@nkC? 5s mz>gjf^ er,^oM »vsnYnՉ9VLGj5,W鰃t-FVhCyO@0js1t }Fz 6Tc!#Y^JVє9DBz]QT=Hx(Vo+Y>(A)~˅Wu;Y# 5*Fj)8f Z+={;ICm{Y;O+ңV2*Zfy[P-RNV MA* fwI/S.7vN%FNRwp 3T*REIX㑁kA.v6dfyE诈NlMsz]Ψكz/Y)rοTR:M 6LmmF(n a00)qTr#QתQV$b%Gf2TJ$F!iBc7W0漹> ~7?j^b-73G DLs,8WqKcP!ʤhto ٤k\)c%Wٟ2kK'"chRl% ],"}~Խxaݻc^<԰luuHIk {$Cr:x!r7e!KF}[\ OCw2]K5V›5s){T@;eUPqAoƊv}bL217L֕hJ6_Id7" 4|h$ 1:m-[qyY#>p<،|ɏǒ-fpRu _yXIZl3 cjcJRKk `}kB='VE.8-p}SMYWqJ,>q-\-, )up{@[JAXe[P͏BK]G5* {pYȤ$dK7*QYE-G儔ȭڑPl/ux3̀jցA;3W cWt6g1wv٘]%+?.b՚94Q4qJ$:b4o4bb 4Ų%4kH__mքԒIl;B_RZQM4НjcJsYkOшڂ3iC @(~ 39H f:*R*@XPIgVg7 Y$ov}u9ry}~A Lʲ?#UKn$Z=5Ȁ gT$@>|N::cCIDRrp@|{6r6oae䏄Ca)K&NS!a`E|TS@lB~2[ -it`~V ̀<R|sKtqe[,Q?^ K|ID0|r5}oCLWR y9;!]K}8(̄ɊiOMw'(:_kB ;_|whJ?sun./Ue!ÄӓOo؜OHNn~(UPJj~SL9(L"Dbq1%~3flMi'r!CrV+_͐]QRs Bŭ= 'Uaɷf9%kn0?}wSV+ ) U^n)p;Sbȵg[q9<^m `0DǓp:1.? VgW(8BB)'in9OyPZ% XƑBphX0- g'˟ z34bT7=|A2aţ^9C H,c' ΃H mNAt;o0E38G%h NJBK9I'˵9#^6 ;٬{V9'tyGQJ7yxkF JsEz@OlY>kP\1%X2A :l%m| P*nkpXq|>BԹh.k5حigkwF$4bSW˓{zf5ɑ-Eh'?2fi,$5hU *K|ޙMBXW{fmɓ(:ٛ7 - h0kmW_Eg@n tmAE:ulגwHɶJg7EHD~g! x(/BB )օHc#o;uOGqͨiLqQǥܳpjϽع?JJ˽)0mjLZAA*r`4 %jhB &g$dvr6877&>%y1j,܂>~n="y93ySГV@ inY-'M?i'\Y*I*qQ7tc=ׂ> x0t Zŀk!528SnG%ƅAÅ` bk Lȍ0z^Mbr<^fkԼLܑCjX~^Xx+m;;ꁈLyݔQ^MX޷WTrq3(>؆6ձw3}0j0}>:wC'rp `eҋQ:(JPwȶGk.ju0A90n?_e oC #}Y5BoPx|@G!M(DA8g8OiW(߿\=<JB3P%1 x"$ LxpDBLGI#q& u⪆c!eK7_7.ou#wD/'^t_^zgI韗8|0;=1k ̗A7z:ާEVLQO-rWpkS,q+o ÏN:V3.yҚhAJݞ +W{g2gTug2?&>wLP\LrOC)yϭu~zrkg^\zQͷg~ѺEy?v@bp-'hk.޹{ӛӣwS}l͹]e-_i Rp>p_$}%v q\D/L:8ߌ5a5 gCzOcބm+O ØP`k>62|mM][d24wq?0^L=?H ^Tn#;>Ci/BOWLTWIS|٠s}^7SYu;gnf^CuMBFoB=7NB׭`teUZsu^W~s'G08Y-{TLwi9וX4}} *^W5eKwoE!9ܣBKҎsQ\Puz4/ZdDjԄhfNeȦ$(V0X%ϩ[`Tţe`LrlBs,Mq 6ǽ}obgḦ&>cK=ltҠz[ǽOwF|jEℚ0o2"n ՔE Џҏ*S- 㡈` d$e3v2i Qf XWS=Z?, \J& ͓ft`%9?W\jqsyxvV7݄ lp٪s*8StijN۹U? 
,a,qsH>~`'s=7Cz?~RO{c;FԨTU d^),K|U ٝvV)7'.ު3[ CϷ =)Rt+7ۤvZI!0'0m^֙MP9_T.%5Mx׻әcM=J;{`Qۛ4rn -Tuw F̅m8Vv}zIc,w6SyyT1Kw&5umxBqpR[*N8r%{qA,uY"v,obNsz֝ lؐs sՁyTk3Y=m&{dm&{ mR9[x?Y[S+B-}kKX@4~`7*$+NL`ÙDA+b " :3ݯfDcJVZJG [L-XuB'9{%S\6iKA|L6O6c[PI,~D"JTI'q !(!U6"$H0pĸ\SiObxNDP9=@~GE{/F[+[Oşc=c=B'cLQgG XF`:no!xUݍEÃ]]~wλ$wp,&_a>Gmlwl;Pye@T+48(_mjwVtl=cJbP*Di@0sIΓ,"Z%zΒ,IPJύ *jn"D)OQ:b{ݥK)ڤtGG.;ϒ˴t`_ʭɕJx'!Kxm.E & JX%B03DrY(P\PIĒ8P˓#Ⱥ<='=+p26 ]$z%9֟'dΈSeO/؃k9=[RH߮냼kiBSfl [O .CSByw|v~M2u L/ws8 4Ϯg򮸖wF\<5'rxc-CmD` mR MDDG"/3^.9j&}s= uZt Ml3!§%SJIRFH2#uhIMnD_P1 LCBmY5dn$Y5SsXYk~bTdžPk_cLt1I`b)U#!V>YY_d1IE 6=ϴؓQ6xc U?zr)O\VƳ^G Γz/-G\miz!}bk)Z!H^]WtuCnއ7(CrW`0],j)~Z7/ro+k5biAX9k|d|9OtG[βsh $˽@[M8RqR6~[S)^&+(3Awo瓱LPK Dה#ak@a0!}ȣ(G> NwyQL\_h)c 7H֫$K7.b*sm}[2 ]bƫi\^ c9ѣ&,K:vң? ?~Sب@am"U]U@qSşr61Kwp El؎̓)(וq6n;#YOޑU3xo`7/(s<r0cBtЉ;~\dE&HjS駱[üP62Q@qıj0H7P("w0/TܗZ 5Ou6W<[c{LY-(d;Hs0E!+6u ̂owNPӆa5_1G gLot_{^ \/?Q$g=Y011R] "1X5#̼VLC,KpckYytBZ7JcEE{%XjלBbթaAR%>^*SAvESI+bEПףHX%APB]Ko3kBr3拯aKۯ.!F yG4* ThhL5bns9&@pGS=@a n"qhb2[E-Ex % emei^K]S)597 ٷWG +Gȭ tq=M\g覥{X<= o{yɹnqiK^^ĚOHZ8 5sZhmlUNfy,iI6ԐB1 pxp2q:0AԨv%-B#d]~B9dZ8NZLJS XZd1xIÑ'Uvz{*‘c#@JGX `, Q $H4cT!)Q*SX  Ј0i&dkk+fBsĉZL&.3]-&y<bҨb[Bm.Aa-ճO$M-(wgtyuvb͹SR# }~sVW6MdfLAcUsٌ9JaHѺ>aV`kpC0"uR#"A,X+?"v  T`.S'up"L U Z)&d_o:7AZ𢮴gD+#%5ҵwDy;ZnzOk{A9_n*p爴@jF*mr-|.p*K%=qc v, %5W iR5"4[B96xCz)O\0b$JT[r'.-*Z[\eۧ+{hOILtstCOy4h#vkƐH3[z61ѱ1"'`|3]5y{Vh%탖 \$Fk#vcUDJYEq&OK"1qv^'xa!wZa]+ 2͒=wWr,yjvqWQ)؀ 8Lpp*۰E ,-I8esx^.5'hcuf[ Lw\޻G.WZqĵd4Lį>|xߘ5rNa(i-c!d2}VьYf~iDn+'&mr]CP9Q`.|܇/ _nz,Z0ĤJ0P'[PNSf @ 7Gۧ+ _u8uZ Q."2%\P$c A v|QG?+8b !.yVΛ_KߘtFm\T-f G=ڒ6'muCz)QJXƺjq?1\a 2_Uac7Q;chM3UL:q|8cLXfHH?}qX|>?.eknwR E#dQ0w|c֐g4|#b%S٣LN;&rΣzˣyt`d?mɒWߍ46UMC|Ŝ5!xtgn3S:yK;ZQ8fQ^pHgFsG܎o =MƤH$+'(Iω$yByNp8ƅYzxe?/-ʺjN++yty#F8JIt*t/L`Qex*etq| ]Ēf>Bq)I5^(%!ާ6-kN3D0Qh&>KPFH@GNiAH8yYO3A?9׾#)rSK%aˢEY!y{GZ+ߊ v&:ڈOˆ܆WJ?KyR0b-`/)%((t!V>͇#$W5D:RP#<&F:iFϡ J+M,jl5rbbjV1oޭFipd]spDisV`SOr(wg >ͥk%dD|@ g1=#.pJl#0OUSe80U)Us|eubi42s{Zto5 7X7m0Ɯ!+U O I$5i|n /0Y  ds4ئZ>q"5A$_k%nEnbE=J|+!#1|KoK9|x}Ɨ<ߤߞ`KTw̃qeʹYjA oe翏Œ?NM?{C5? Kw=._걗t+n;vtc'ՠRQLys J+cu )r1!\s u<61TkdH3f+ZʆQo6:,=R#LM',\s酱@Q6D`GTJLze`9N9݅΍7vIuѩIiAAgFY6"@ +6M5Fps2EZXYw= Li8μmn5P"|iꋫUz75]a L0̐pUTUضwH;roPSv``(Rk_c6LB>v߰6RVa^r~,o(Z;cއz]}7?Kmچ0oV>(NhX-@8E^ͥBb酃]ڋ5&Fv`,bmG:jp[ZJ8ۙx۳?oڅu/~Z\?ww?.ˊ`Vs|bb*+#8킌J&%} Qٯc0oW|#z _:.G0<8H)j&ڰ.p1ف1.'V#2IqSUPқu*V#V>paJO kK\gyBP'2XI[R .LC> ,,XMM(! IJܓ:₢0BY iTwgW,TTզbJfp 4Õ]ƒN~H' W pET9+NُjƜ Tz[IDN5pk1')C2:ٯ VQ^{π! fS2Mve_ؗϟ@fs:vo<ӽ=(`nLg݂g?Av=v|uA܈F^tXx&%LSW->I9fs)#};G$X\pYluzNns\\8̻WZJXfYB'?}KRX|9 'z=KbU4Gl`cP*M%L`6̖ ා4/; -yK}ޞ[w;{&?{ǵjv*欭QE5ue/O?tkk&ozޞ<@'5hp#)G^k0:< ɍc5#(x5:,ck^ibYUcacbTlj Mӵ4/pX=j.N kl 0AOTL"""慮gښ۸_aiS¥4R'ۛڳT.\Z̡%a6H"pf(2KH$0B'4GNX2XۤzLWȩ<> \^5tA/%]K>9C$QPk7ד^ N׫w_ELy> CM͜{~uzh?xhT/XxA0-:gvwY+%$7C VE7$ Sd.絑tGIB8;t>D'-Od "{#~ 3sR<лnX+vbnKn4K=c UB?& U mz\]dlҭD(lsr"Wg5rs 7Q\/HF4.jH5X@?ԍOY#޼,Hh]W??7W42=db-V+ 'ݻHVĉf;XqՈ2J Ll+yF]YJQ+\|vGN(ݨn՟IѤί<~_S]Jc%}h4A+̐w-Y}3T~+iBjiOId3$/2\5FJz'Oơ-`aswv0si1YrRYz)0YкdOZ߯;XlJbi.PM^^E8ɓOÑ=\)ur˥psssqtN֮]/dWOX*l•/'VK-OU/~Y%%|MFj,vsm& - ,4YÄ/U,r&.%O zH+='B"?5/<8@U :덛/4=Bk5KZ+DV99:S2 4efr<|Z{:_NRtL >dd9;A!2J+rG BZ)Y)JHbԂDNLa9xH y2jy²"@8x 4[x +/K12hX7hTf5ɘ9=ɞG-CT/pK$Q!8!%ai@"7*DcRvFtP죳63bh,`\7\ r hiɹ*+cdl`d3Kq;I$ JW~GB䄮X6Iy4FR^[?)ˬF.^ז t]\Ώ. 
/ e\xn$ۢw)R8>Q,r/XR"\r GCiiPB'CHT7h<.j'.XBD*KiP'iw#kZOV7W"f6*Qqvsut9rW^R}((PS,.p): EeF$8lvN rt_KU+oiIHR* -Iv0oπo?gv&xfp+VOfGUwucΐս*#*D||n#̖2NcQY㸣SKfNjZr"݉+a%4ȓģ$Q].k䖔9$a%b+#U!GQ4Oܖ|ǦwWZJ?Z)Aޯ6 5ڗn@gYBR.& 45;:Ed¼?opoЩkҩG# CDT I2b uge9,㖜b1/EYk|rx +.9IC:ΤI]`j Ɣ"@GD%pxs:DnφK &b鑼 9WVIX(P"@ ʼn̨d4 @ 6&*4[3gFc&Ҡ glB J)F*o˓3lVh2wP]qSEΏvjk)'.YRŬfTIbFVgTxรSbX^h#%QE.uU*x=6$4D X.ELmr41W& wM淟N>2;*Yؚ##Aq gT:11p)S Cbg 'bG-dF6^+5 ~I} Sו-}:52 ~ű^Ǜg4 yJ/rcϚ4x]q2Ʒ 4xp,oSmzܕ') iwk6? ϠT՛yZػ:k ȕ(оL[*Nj_񄖪ZyL 6 O]B6rƋr~x~;pwf|ϗ4,K_#}7ͧwqB.Lo[X8oѨ O/jM:^4/emF Hyd/])t\j#9]Nh87Lӌ oO@,~'`e&B~J I7t盐i$)aDHgn襊CʢML:eM*]$䜔MvP28DYV5aakb ݆%xSx{Q^ 4/(YgC{eVtE>|;۔uGI]c=N7Wi?H/ rO]- ")2*A?^MƤGe:ۢG7Kd0ixX/Lc]Dʙ6* kyܗ ľr%ѓZJWi 2b7:ˬ@uNB A`usB:2!}TwW=RH&;^ިpj+,, g!BSKR -1nIJYd3#ʙJB]k-ldqsuTKR  4q:Q{kWaɐ MYEO~AfL L)-4*K^7?_ \Mk__z wXOC/JU뻑")T-g( Y+J5KEctVo熚T %oqlA.Irwd(>OnjXRy^˝:=\fj-<7:=|M@hBGلJ!_e6ız@=UY$%< (ldlrfN$ LdF>G]H% R^ I l;V/;F!M&>rݔd 8MZ Wab< @z0?{W#m@n/ @.0}!tly/YJ~L~Y$ZOb񩺬(u~۟mqFq[L4w`p$Ɔ S@=2e]jR2cEF"iL)^Ca,"wk %㸆 ԨIX4vt *h _Nm Y4-=>V1NtR a+  E+J" X(67>QK5 ,̕]˛Ž~{avXrn6e`--~ǵ?ϲneۇk_(녃/.~H;]^-h;ܭ.6?9_q݆bOД2 j~sin_}XǚOzy3OPdZ?4i-x69gV1k1^`*H1\ʺ–|XUB׊srl,tw=UPeU1*idKfypBΟQYЃΟ'|)˿jsYsU@]-uq-N0խO3GO>d.zyT*  %0zZ4(/ksxFά?\KW߭mnw) ݃CVȞRMt&-$l jV SZ ,ݨ\.|}7sXӾ U6j*,6 IDKjvJhWsURd:T-!|/R|N\hQ:"uM8bOR0m!? &`]r!JbXKCƴv9;ea`N֎xga$v۸lߖv3±.z.th- h>d:_I?#d2:/dOCYKD%$k K9(DuQQݱnGR)5DJ;G䜅gqctO2XIZV^WԺ5l;+v" f% v,Z5Y ѱ&fT?oA5loT$cT[#0* )gUCo:=r˞*d ,_SB`/esqImx:A@!H$ p4Y_Ke *F*yAŞU%O}T$T5D-Ds/TWx 6+Ɯ-LYTw`ʙ[9[/{)CNHK<VUQxƬ!ht?3kk-Xȩh49U=iM\q=iMñTȩ:hTr'NOt8xttE$P,Tuf':4LG֖搜 u 2FP? $Ӷc&3Oh*wOK< FߟS4'Ycwv0B<<d1Q05 .\5ȒAJmC%$I MeJ@L$yeYDXO~d-Hxi; "2iBN[TT!]ISSVRƬ \UZ^LnjeGa% |' ` U|}{7];5]Lf+t/58Z2V !vԻS}86G$cs2Ƈ"/RHֿ޸W0V`Щ" }~Z7ʹ~3Lp뻥,(06%n067=hgw3߬ةB^)$^4FP"^N(FFC@.6wSSuqvv,QmW"&LB9MWjwmvÏ?.\.>]_mǔ]"q$r{hJ_b~zy*O}FZ(%lu9RRѢ9 (VR#z~ⷨB̐$)An}*$nLA&:+ c Ff~]O_W oМǵj{՛}K Gvs*Gs8'|S1o8Ma+},a._KODz~Rkj Z0Rw3j㻝YCrCn.On_ {9xﬡb8Ѱ0nKyuW|8FblTqs5(3 "0nL ,.o> ⑃Z%ϋ!) D!DrH잸ggzOGlGA!I[:(͞!Qm][OO-X|xI#hΤ{-)k1XΤF\I4J4pKYHv:hHhxs!z5Ov1Ɵsq)S6 t^*S62ufY-rI/|"?D,0#X7L&sl9, :aPG=(HDgPDt~Qf"dpFIft b[} cV)T GTlAb4^)j,AyS΀D5"h^ީW iL4Y ҂7>W(}>AUtP8+ Z9N!LSVi^raWWء*FR'|)R=*DHS2#(-*yQÊWVrRUOż ?{<X&m]ɺ(|c֐ =Hn !I6dM*>Kk9*O` _e#ViF[S õ֎\瞧Kyz3ÜSǣѯłH08InK,7ԂrKCǤtuE(&H4ΪaakV#9Ha+.k M1Cd=sk=ǜSI3Ò<}yWddPr7)yBy|`"38O5Es<((r qpBHWþL@:4 \߬/ ( I%&2#ȤjQWp gY^kXZ̠mm[#Oiҩ-¦ѡH@>լ/)!XL1ߢcgPE<`i>: M4թ)2f_IC7.z |ht>dƞ_Ӻ&p+&e1xmN\L+up6i CA>yXsdY$[)I0ԈZUlYYJh-brU!( k[Z6:Ej˜P?0ʟhcIR0.յk5 ptk#xn*W8ħyBry<':h@"&Xڛh]pQ~}\>HN-n& VX_n\Ym_D[zjE]^\zfl\{5g϶nnZ֭ ]ړjۀcd ~yrvb/+L7HF#[r{V_?.V: : JB7 []/`ocE>ӿ~a,n^}א'kc=)ɭUc!/DlW`6{t2wK tRŻM udnn9,䅛hMQ"fOK(FpBA~wjTѼ[@c[ y&bS̿QgR11otn]o: *ۆn #[ y&bSܙ&-fvAV˵)*e` r'~@74qt J=,R[wWAz)ߦ?ZL12vW &HkVhH!jiՆҶSsCRle5*Pe13fvhQ C)Vr H?;ĸL+kQc.'X2D-՘,?{Wȍ/{]"EdrY$׌6=kٓ$m[[ dOUd\ȶA jɞ1$UhRaZfQX`FդT6RvPFCNlP)IU&`lEG7F?L=>sT@DjG3rn55"~lyFu5.TiCZ\=J7/o%do׷.nC\?h~of,`|gGo?~u¶zazs~Ξl֎g?#!f?7 w.|sqW6!Sd=$ZkNF7qMzre6SnŸ΄TyeѸzvQ̜[R*;֩ #Q BIM'$.}Z.w8]\$tQX$txURϥ qL[FȽ|3QVܯ\l~r2v_̢[LCI* tЎ26tT6Ȍfea"̸/N3pBt56*Ji4Nyѥby횭EWL N l`CvC66& <]+uwW2 ;rTboCSzGPk8D@UwdY;Q(I0Y\Cy] c[)"J@I]#R1=EX:.K#M 3%>{AJ{uJۨR$- DoSILY3R_tE ϊǧ,Q$cm!T{,\Q+(›zq06ROV' *D/t|dx@Sȼ=e'VG/ s%!`|NhdQ3"3 N EݠN}R59h8lr>D  rXǠƞ3x*(DZwA+SZ,5NX#JrJgd޻YxWY<>hi$AG \JA֩Dgx Ҟ)!W%uQi]Hd`%ϱZ` 9} !r 4g-YgW吁V\C`2w9dUlE +bj3`̌" P(/ma"JIZNIkKMl'=pa(DN ׵YoeU֬w7_Nzm޷A,a c~i{ #1=ePS ػ%'6@.{|<}LNndA,l cI9o3&D0NXbǖ؛3(B6su 3qr;Br>z_ҧpv/1~11BkN Yc`5ʾ8=J6#$}0M I7 V3fYtBj8=]HȆxtlt{N '$Hr! 
vL9{/!e|S,PRANk'bI*$S, K`54[E654)d~3'͉ g<:q`1&|(/t˼AHMK{TGi/{ACy t{s3+  1xC# ֵN!䜵2X4+)Adه)iO8ʂ#u(cQ-g^"e%1F{7XM!z+aG_@ᣓ+v.)]pGSr1>؛X-jgfZlVz)%0F-+7muef*DSچkR#e$'I@n+xzF, V3@X/}'.]Ayzl^)zp{*k|~\$^v bo?.3(Zf hP/s Jܡ ˌ(3u:b&9NE(clT/J*e6AfOQ vl6ih ͮҫltEgwϤ:Qu`{z6RTO!e6g bJQ~ďb[M-`$2ɗfK XSStUa!˾)ylwߪ\KmGX?z'U^b (vSE421α8ö`pźV/+aS6+aS6źoupWݶlj[lj[VXםmPfNStz}sPNvXTkWFyus.7Wu%hyc#BYKĮ9G u sVGavt&l!er>llK~]뛫zgO׳gC\MRo.(hX3]^i$1K^̦w؝&<,\Z)Șؾ.eQ>c:dquUT$1()G[QBD'a[/˜aؖp9(֌1o V'T1KN`l[ \2^(ad JII1<\h0dD{HA߇o|IhaQZL/Y*B$bޓ?1\d:{Ww^ʩ'~mz6ߟNz m|S  `R"Phci2BcV)AdA#K2Jl>GH n.-SL3M^ s?2Z}DlG^M%2m$e^QdJ(4ozȞ(%a5@a)S$m2r"eQ꒓c!5 Eհu5`aӦFB' Dq5\Z[j, /AId-l|Z[Gq[P!&u2>/l,"BрCɂMH&A{]Ï]?iX|u_ 4)D~y`Y#[HRd?x+dQ+99QbFLuMӦT i0f% |$qta^GqxlΆ~Z'/[60.=: iHtK%YSn%X$ypÓRJm֭ tm\hf]14&OޘkߊWXHYb+-ZjVɹݴkѾnf)M ?ǍY/ R }I|݃5Kd{h4cIXѴ.-ݺ2{EVEօŪjd`SüvLR R`3empMRr:]{鯿}zh!)s\*{EًzFmBb2lOC Qiu]v~WeW͐EԹuOԀf˞WWC]QREOf;~%6!W=JtdtD"DLbOT+"hU. 2BkB42-nidUM#_^TF`ʦJAf# Ax=*-:'Yjk"Nq)p;psM7<@N` u_% jQB唡\Uխ둩ՉTd*?g\THa2;|9;r{#ƛrJ  +$1o|l:ZఝvZo{#Mµ .t$hEiV}b%67>BQ. TmC' b.;p.M(HExv ӱU1NQ»mP mַN\d@+ `@ TBbR<v8\`:8rƵ5Ě ڌԭՄq'6q4?I= FwzM9VwonB(y2Bi"Jr5KwtT9DF3^ݭ{vPAQF) ~:4|0yN9)y?.L{?Nnon(AAWJKGc*Z hLZ_1oY".Ɠ5Jg<π~1dvg66f(n\xG>PgW׉fD~.H[;ہI}h/Hߋ(f8 ӛngt=*vc-59 ;X8;_D>>N46ʫd/MHEkRS ?c0;X2cBl;"{ؕhP'2 d:"H,WDlYagXFq=G),gp2i_xKdt!ۂ(26z3zIrg@ygscINsrs{ЇFH!0>~\ݨI`S°TP[ZKC&a4h~ uG 4vmҬЂpvh6\u5{Qgf82 U^H#Kv_yBtt=OJ>_'g&jd'׊VLS Sڐ H)ep:ڎ+Y&)z慔Kθ`B]D.ߧ8tԤo-!/prdM4$7OY$Uq鬊_[WY";4W5xd}:#2@#e_D ~-\%NI˧ %P`\r^HVMU$)YMڕъ3Eef\qq//VyŠ-͖rv/..»"+/•:/3&ǝqBPsD:SYQA0;0BS|fߒ/w|8j DEu V;.ݿ, ROzyO>T:V}nN2՘fTh u~4y~b|y"Ӻ9PD.\x{w?+&~g|2`nj2.B}f1N9%[]IbŧN2SQLpHZ.E.v2|*p6eky zi\b쓻|Vhq?RO.\3!`^ͪCyیի uCLWwhzj= _!|(ߩo3G* $A /B\5mފWUB_QyWЮV/VK}jY3ԑ+@7/8^yϣP= Ѝr8l \}vQ֨ԚuF^]\wQpEuW\z UD:xBt!%eZn$ediZHQ4R%4Q ״nA;5VvߩC+P Bh;½jZ{Pڛhos=5iݍj5螲T]?9]TJQi~~ʇJ_h5A )5@t->'IWq֐KR^.5KՅi݉Op0N.狱aj$_z+/񷏿}t p=Ox>O],Otx=~Fu:? FlA}7g| Gk*+љ2.߻U İM-Qɶ-y[h86 c0v^,ܹNNô2@ω=kB%֟;: 86 aj8Kg%>zG?1m4pὝ~n'9?)p ϫSLXsL#ug-:{w-dJx-&ɔkDXO8Y(;/3yNrSLK&b `Ůka$%c r!Ya, x 4 Ksq|+8#TUT3ϔ*sxXLJf<7UL"0ǛC߇OaIui267%f׳~zKT-_'Q*{A9jIRW]׿' @U'xZLp#XIM KV-SaWM2jam  $(7z>)PTZ1Ђs7bʸv>$胈E{F #zvaŌCBt++VH]J 48~\- )eI-7ST{,iq]o==M}<oD6~|>= EPk>[ong=^}#7>0zv0 ..f'?~ 7{PXB!m¢K쐧JsDFS+rEn EG+G5ݦo$|p*WLDΉdT)Gj T47FP;y'dUٯȌ$Ɍ[̠\TX#[T hW,jA2ȆwtZÄvg9 ۏ_|i$і]=}BVq29By*u,)3134Q-]nS $R3dE?oG/lG[!Ś׃GLDWvy!19$:ƅ$8~Hg Q(oB0;0B!vb5x w>bNvm6zΏ{Q_&[h]S2 dUA:=7xQ<I6(>7 Tc==ߕj%4vLCLJ?YzX_313$~c%=׸ŷ8yd20zݧ=[XQO+jv~X"z~E3$iȌvѩC#fn$ ɏӯ"27-xeM(U޵q+2잶xyI|IvCbd薙Q }sp43ӒAlj"Udr=9嘎E'm5A V}Z(Tڗ~JE~vAY˪l]AQ] m˜`eA"|8u9x9n;觻(ykQé69(L5UkuO9l^G+}3I-*Mž^|inI<2_%i"eh֯TMv| X Ӭ҂JYG6OFۿyW%#׎r̰uibf:j=0MQ)ZI9|3Po·jL#9!A2,kqEސ!)*VR2Ƞ-'!evbԔTx™hv61 XFp`NqO)r!^qgsK˜Z5B\ @&0k3>qpB] ⬙Dds4K- "Njm|/'ǤJ޻2r~/q8Mo-QA hZ+jEg\_efeYJ0ϏM>Ƙ!gjdhz+R?f9SZF#)3͏,Ljr|UٙpUV {P[`UP^Vt(CC*zqFP#mCrP['֦#h 3@uyM,npj*Z@CR:`y{W>| /FU!298>t3?p$Ec- %:=.D ӓB,+tBW|1+{ PsM]4Ay[c4ࣃywZk8&.LEjEi,`cm&^)8 @tT,)5{W!TqԐJ+)@[jʏǦ\K N{E#9\W`-43Q{:5fX qRRnøEB^GLˤ(n퀭NonG2*]=]@19ډGD]/t9N/GIII{)ulMԨQ[ 'iISL[ݝNDr\ɂ+%p+|4~w/[F;~"bƣj#y:8֙[h)ӽnx~$ӝk70fTa]$T,zq5;9#WZ eD0bD*<,iuj$;Տ N1bx@~Y*x*~?KTfIqYi5Hov%o9Ұ7= ʴ{Ƌ -2| C80Z r?OSR%{#<pAPl0u|IJ[Կ^<ªD_j>f9Nce__}h#Ҭ4 Uֽ=:x)=Շ䥹OB F)lCܓ<ɚPl%IO{>{$قzh+BW#5I&4z5]gƎ*S\qU n1AZ9g$b20e6^m%#Q|zJ2%Bl(g |6 "d86aCI ~pVzxDYp苴p4VFt Z_ b]R6eE*rx-hܪ Wx[،(KM@pbblU ΑI(kB rq>'4_ $_Kg|* H:Cg'x)@)K<:xZj ƝB=X V&R  3FAj y9,ʹC1A!3z%JQmA3.D.Hk]MAH!(aǼF#S [r̄lܰ=W{+vo/F% #KDP[g ۆX0Loa,T>֌FͰiքk88Wm$"V:!u^ 12Iic`s@12.\9UNabS#xVr퍑>X'u$c?" 
^DN~,]:i '\^EwOΦwuv]Mv?M𹾽rϣrvz7esA%eb`{aO-l7Wxnr;:NՃ܁w3=竦ϣFU]:O.d1>7w =צURZնHaM*=$њZj靛)$quoIEH, icD[ !^eh&GFG͝__EY8Ƙrbw=j,6ς{nkJ«|`yg5-hllp7JiQ~Y׶G< ӦA 4& , V'T&<`+ #xW6IhAҒ8"l;[_wgDwg7dԻOf['Qt邔rB,I1m:ѷu$y獒OkW(}{̽8JI׿L0Nn:p( [g&jì1Z  )- jaTV_*L?.jYg3pG/o>N_Ϫi,RǟL{C:dI'}"}$ϗϴsL1qyרEwc̻6K>ȃNT ^apŸfT"Q]3bl6Jlkf)E|^'e:ShI_δ9b"[WL$Y .(E$@aIq5}xA@()̏nFe,VWWr틀{Δ\ZOZ1AHv>ڰ:2S!(`8Y^p%%:k,9y!nmFV̹N! D N !=$ZhF2`$L{py`ftC%"k n L h:9"qMW rXA:`5 9zuM\$)1B!~L.Պt%07im -*tļ/6耊48Gjِ )GI)A){''Æbo ӆ P"oH86ԇg3?D-,%]l+gs@?:"&0]t&0mv{­ܙTCju#Yz؟HE{^3Uێ\K3!s-#Z"h͢a|h/U W[.BdyQQ\wl*^H4I9^tL"[LJioLIKj iBߢ-Jqs WZS1DxC+',~&)9byD"C|Tad@/F!J@9#)^1.1Uz‘dG}Ocyؒ\dC]KL !K PC/s VňyC* R2J0A9c<-TEJcyO62C??NoTE~iu:9BMz>|:ЈWU*C qeW~l>bɦIfk&o'?]^G'䳹\&f_׋/rd@El"6v6=hKȪ>N:BQL_ ̾}g:zywcQ]}ۻQENiUĿf5AAfS Q5eC^1zJ*Ŵ+zo)[H[ZQ:&m%8CC QIU-v5{B:p]Lt$9Hh>b/(KpH@h&]R LePCQ꽔³iJ55Ss+/*Eil-X0628IbRrõ" r`s6b -LFS3KH(ArBF13a2] R2e BQcd #! VXkI!33,3ayj,gѩ,{|'ĔE%\I9jmBj%}G`H IkqN,%E,\̧Czww6rحOVOO2fxr-:}~2DG&~veV7Qܚ6ޝBJ$\Ng?{WHQ/{J}Pb1i L&ӥhKc7(-e$\r"HF/Ňw[<9COYO*=2jȶ1Rg `&ݖbX(Dq!yĢA[_rX`FʽSk0- FO``Zi\[dñQͨ8`Yf2MԂq GJ ̩54 8y*QDf+,lҬTD<=,sNoweNvdy5"ER@+鲨3 hBX[Ѫ /h&K+=Kc<7R(:'R7>%ʯ[ "`H4t'"iew!.E72&KO4)5ZI'`\+fr} (}tb,}Ek:+)NXM+)!x3^a6>r,S$r4GׇxD"U/J! x&/L11rvz jZ!#EZhۺ[22`~ƿ|a ~sC̅!($RZ2q6H݃ `#ƅJfO#-g;w6ϣ(Ȟ_ )I$/tD9UV3L)4%JeA r LLCI ]p*i6Y)R c9wyu~uXQXL$3Ya,~ﭻXַf\ۆtA*Ý3TF aEB26*$ /$IwХeN+ qYkHiʈDBT"l` 8TBxyUnSng)p\ov{ I<_Wxg; wLr#_W_x(5 -/6R>;ǻ[m#3?v켽[yUg}Ǣaݲ<^=2/v<ǕR߱wovG+9:岁:wC6F t;]BE>?x6Qno^u]LM4ɦƂ#nĘN;nx#1x-[ y&ݦXegt %e<8,XATҮkctaOK;K"ĔWبgWccH'O$dSpAg`Gѓ|«Uܪv5v5ʪ4E!z?'2|k5O$} iEaހDiArWyb:Y1*W\&"oSTu #e+1it0׹##`YQͪ-Y;NcN?>o"An,/vWKQ`>i5 f?[e8hwpլ7CƨBsn-U{PI;: hZ#WP *;XD\ ?" (Im:W-V;  MA@*22s\T@s+JbS)E/cQ4HCѕhI--/m|m+aijؐb{h2IVDzZ*b`NZHdpE8ab#YZbXQUQj0|Iv̊N|u8]4"N%mP!- vĜ)G9U B8+QKWx+`qLYI +I Uj,Il$3k|1]AK-bĮ|*rs~ SMOy]3oK('PzzAj(,mX鋩5#49''9gXM5s!z*=m7{"UsYe@wSVRFBR5i[b kFJ["SJ#lċVh1^!vДEܚ\lhk鶕aN$ľ HO0H1LVׅ~J;Tbt% =f~}Zo&"T͈,>7edW>E592'>i¼EsDW.0B)%)*#'ea%& +ia {8B&̓K|Ox#'GRŧy`dw%qĎ2%BKIC q慜c*$L:ٶ4XNF~+@G C6u1 b_\w&h79:RP=B򤎷6_I^~SAj`$ EƂ } cM::(;jc7sA%*y(&AVΉ/ TQYUv cYYJ`*NVТ,.)+0&'и Q@f+,,wY&+hţ.bX ᨜8QFx;w??BNK,(X Zǚ̴vt&}bDYkh[g[gw>V[gy%V` W߄WRsxg;M?\{'nO3,6Gݮ%x9h. f R',EsF`Y<Ɩ9ulA Z#C^"ax)Uaβ0Hp*$qFVLH*"K+ap1EH0l[r_~[rwZ ̓"'DI9E6=X29=ҲLZf}GZ`VTL, Ҳ)Bdؾו'㉂ǽTx8*xhKIp'nZ#O\ `Nl% ŞxOBų 4ckj~p=ͪ!&/WnLkj2d|2h$ApslPFP,HKT !TIkvHB3$ t6q3*iN+K[$jΓKkL~F/LcRkaѼj=9y$+uAC\{1 As߃z!jJ]!Ds>PÃOyR i#'.t\930)r(;23"4R˶z6j$X5&uOԒ`?JM䓨䕈ATw51T#5\%dJ^,䕛hMQ2phvu S11wۂ:h-.rva!Dl*8m$fT bL'mfͼNlvKgrrX+7$z즂s1&bU2G”$UaMNЋšB^v)fYȺ*"xSx˓bqIC-y$w?ЗL3&Kse., *v>yDܶ1~p^='vj+Ͼ闟aF1rC|3RjT"DNtbv_}1u@ϕ`RDZTV$uJRFjq # p: ¢e)jnר"01ֺw + r䝤E}4VOLN7pЗ%i2@9pB>WN7$.wF%N 1JGc:YF`SZ]jp&)(9ԡu2Z1n޹ L"ʍ+j !1/Vf ME 5a)\x-_E͟^/+߭7 +Xxl,*fvƉbQcNpt"3)xʗC +u$b+B31":_R)pw|B~sD؀qɟdCri{9raݞ.xz(kjLp;9]DD"MȐãijSB9/ V&?eCD+|ڡ.pO0 iZd6W掗W5[Ф [N wbs+)@s[rSOx6{T4<* ̨E!&9 u@kQn[~cr% =r?YRrOo;= hcE$#&Kŋ [6Y³N|tƓLݜ]|e̗mvc֟>;lnϗ>&)anǥ|L :;/`9{7,HHX连c!+fdzGg郗Z.q[I_6ipY'u$;SLeR.=Ȓ$${*}.DIx<6htG~61|7}{Ai>}*bp&}VQI쥢R;LZ&Ih*m G.&*bJoO$uݻeF""i/- 0ĶʵE']ld*d FW,E5T ߫"GɷAHo/+6)ϼct5;M!/J:blQ$b;y- g?{Ӻ9/S蓗 |âdz7o0,¯M(C DlD܎_O?OC|1ä\'oξ~yM;;-;0 N[kXuk>|[V[; YA؀j U<@+EbBXbn#Kဝ!GBXgZ Zb9u%@"alrq R! 
var/home/core/zuul-output/logs/kubelet.log
Feb 02 10:53:24 crc systemd[1]: Starting Kubernetes Kubelet...
Feb 02 10:53:24 crc restorecon[4687]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Feb 02 10:53:24 crc restorecon[4687]:
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 10:53:24 
crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 10:53:24 crc restorecon[4687]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 10:53:24 crc restorecon[4687]: 
/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 10:53:24 crc restorecon[4687]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 10:53:24 crc restorecon[4687]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 10:53:24 crc restorecon[4687]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:24 crc restorecon[4687]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Feb 02 10:53:24 crc restorecon[4687]:
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 10:53:24 crc restorecon[4687]: 
/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:24 crc restorecon[4687]: 
Feb 02 10:53:24-10:53:25 crc restorecon[4687]: logged one entry of the form "/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/<FILE> not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16" for each of the following files, in order (timestamps 10:53:24 through GlobalSign_Root_E46.pem, 10:53:25 thereafter):
Certum_EC-384_CA.pem, BJCA_Global_Root_CA1.pem, 0179095f.0, ffa7f1eb.0, 9482e63a.0, d4dae3dd.0, BJCA_Global_Root_CA2.pem, 3e359ba6.0, 7e067d03.0, 95aff9e3.0, d7746a63.0, Baltimore_CyberTrust_Root.pem, 653b494a.0,
3ad48a91.0, Certum_Trusted_Network_CA.pem, Buypass_Class_2_Root_CA.pem, 54657681.0, 82223c44.0, e8de2f56.0, 2d9dafe4.0, d96b65e2.0, ee64a828.0, COMODO_Certification_Authority.pem, 40547a79.0, 5a3f0ff8.0, 7a780d93.0,
34d996fb.0, COMODO_ECC_Certification_Authority.pem, eed8c118.0, 89c02a45.0, Certainly_Root_R1.pem, b1159c4c.0, COMODO_RSA_Certification_Authority.pem, d6325660.0, d4c339cb.0, 8312c4c1.0, Certainly_Root_E1.pem, 8508e720.0, 5fdd185d.0,
48bec511.0, 69105f4f.0, GlobalSign.1.pem, 0b9bc432.0, Certum_Trusted_Network_CA_2.pem, GTS_Root_R3.pem, 32888f65.0, CommScope_Public_Trust_ECC_Root-01.pem, 6b03dec0.0, 219d9499.0, CommScope_Public_Trust_ECC_Root-02.pem, 5acf816d.0, cbf06781.0,
CommScope_Public_Trust_RSA_Root-01.pem, GTS_Root_R4.pem, dc99f41e.0, CommScope_Public_Trust_RSA_Root-02.pem, GlobalSign.3.pem, AAA_Certificate_Services.pem, 985c1f52.0, 8794b4e3.0, D-TRUST_BR_Root_CA_1_2020.pem, e7c037b4.0, ef954a4e.0, D-TRUST_EV_Root_CA_1_2020.pem, 2add47b6.0,
90c5a3c8.0, D-TRUST_Root_Class_3_CA_2_2009.pem, b0f3e76e.0, 53a1b57a.0, D-TRUST_Root_Class_3_CA_2_EV_2009.pem, GlobalSign_Root_CA.pem, DigiCert_Assured_ID_Root_CA.pem, 5ad8a5d6.0, 68dd7389.0, DigiCert_Assured_ID_Root_G2.pem, 9d04f354.0, 8d6437c3.0, 062cdee6.0,
bd43e1dd.0, DigiCert_Assured_ID_Root_G3.pem, 7f3d5d1d.0, c491639e.0, GlobalSign_Root_E46.pem, DigiCert_Global_Root_CA.pem, 3513523f.0, 399e7759.0, feffd413.0, d18e9066.0, DigiCert_Global_Root_G2.pem, 607986c7.0, c90bc37d.0,
1b0f7e5c.0, 1e08bfd1.0, DigiCert_Global_Root_G3.pem, dd8e9d41.0, ed39abd0.0, a3418fda.0, bc3f2570.0, DigiCert_High_Assurance_EV_Root_CA.pem, 244b5494.0, 81b9768f.0, GlobalSign.2.pem, 4be590e0.0, DigiCert_TLS_ECC_P384_Root_G5.pem,
9846683b.0, 252252d2.0, 1e8e7201.0, ISRG_Root_X1.pem, DigiCert_TLS_RSA4096_Root_G5.pem, d52c538d.0, c44cc0c0.0, GlobalSign_Root_R46.pem, DigiCert_Trusted_Root_G4.pem, 75d1b2ed.0, a2c66da8.0, GTS_Root_R2.pem, ecccd8db.0,
Entrust.net_Certification_Authority__2048_.pem, aee5f10d.0, 3e7271e8.0, b0e59380.0, 4c3982f2.0, Entrust_Root_Certification_Authority.pem, 6b99d060.0, bf64f35b.0, 0a775a30.0, 002c0b4f.0, cc450945.0, Entrust_Root_Certification_Authority_-_EC1.pem, 106f3e4d.0,
b3fb433b.0, GlobalSign.pem, 4042bcee.0, Entrust_Root_Certification_Authority_-_G2.pem, 02265526.0, 455f1b52.0, 0d69c7e1.0, 9f727ac7.0, Entrust_Root_Certification_Authority_-_G4.pem, 5e98733a.0, f0cd152c.0, dc4d6a89.0, 6187b673.0,
FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem, ba8887ce.0, 068570d1.0, f081611a.0, 48a195d8.0, GDCA_TrustAUTH_R5_ROOT.pem, 0f6fa695.0, ab59055e.0, b92fd57f.0, GLOBALTRUST_2020.pem, fa5da96b.0, 1ec40989.0, 7719f463.0,
GTS_Root_R1.pem, 1001acf7.0, f013ecaf.0, 626dceaf.0, c559d742.0, 1d3472b9.0, 9479c8c3.0, a81e292b.0, 4bfab552.0, Go_Daddy_Class_2_Certification_Authority.pem, Sectigo_Public_Server_Authentication_Root_E46.pem, Go_Daddy_Root_Certificate_Authority_-_G2.pem, e071171e.0,
57bcb2da.0, HARICA_TLS_ECC_Root_CA_2021.pem, ab5346f4.0, 5046c355.0, HARICA_TLS_RSA_Root_CA_2021.pem, 865fbdf9.0, da0cfd1d.0, 85cde254.0, Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem, cbb3f32b.0, SecureSign_RootCA11.pem, Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem, 5860aaa6.0,
31188b5e.0, HiPKI_Root_CA_-_G1.pem, c7f1359b.0, 5f15c80c.0, Hongkong_Post_Root_CA_3.pem, 09789157.0, ISRG_Root_X2.pem, 18856ac4.0, 1e09d511.0, IdenTrust_Commercial_Root_CA_1.pem, cf701eeb.0, d06393bb.0, IdenTrust_Public_Sector_Root_CA_1.pem,
10531352.0, Izenpe.com.pem, SecureTrust_CA.pem, b0ed035a.0, Microsec_e-Szigno_Root_CA_2009.pem, 8160b96c.0, e8651083.0, 2c63f966.0, Security_Communication_RootCA2.pem, Microsoft_ECC_Root_Certificate_Authority_2017.pem, 8d89cda1.0, 01419da9.0, SSL.com_TLS_RSA_Root_CA_2022.pem,
b7a5b843.0, Microsoft_RSA_Root_Certificate_Authority_2017.pem, bf53fb88.0, 9591a472.0, 3afde786.0, SwissSign_Gold_CA_-_G2.pem, NAVER_Global_Root_Certification_Authority.pem, 3fb36b73.0, d39b0a2c.0, a89d74c2.0, cd58d51e.0, b7db1890.0, NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem,
988a38cb.0, 60afe812.0, f39fc864.0, 5443e9e3.0, OISTE_WISeKey_Global_Root_GB_CA.pem, e73d606e.0, dfc0fe80.0, b66938e9.0, 1e1eab7c.0, OISTE_WISeKey_Global_Root_GC_CA.pem, 773e07ad.0, 3c899c73.0, d59297b8.0,
ddcda989.0, QuoVadis_Root_CA_1_G3.pem, 749e9e03.0, 52b525c7.0, Security_Communication_RootCA3.pem, QuoVadis_Root_CA_2.pem, d7e8dc79.0, 7a819ef2.0, 08063a00.0, 6b483515.0, QuoVadis_Root_CA_2_G3.pem, 064e0aa9.0, 1f58a078.0,
6f7454b3.0, 7fa05551.0, QuoVadis_Root_CA_3.pem, 76faf6c0.0, 9339512a.0, f387163d.0, ee37c333.0, QuoVadis_Root_CA_3_G3.pem, e18bfb83.0, e442e424.0, fe8a2cd8.0, 23f4c490.0, 5cd81ad7.0,
SSL.com_EV_Root_Certification_Authority_ECC.pem, f0c70a8d.0, 7892ad52.0, SZAFIR_ROOT_CA2.pem, 4f316efb.0, SSL.com_EV_Root_Certification_Authority_RSA_R2.pem, 06dc52d5.0, 583d0756.0, Sectigo_Public_Server_Authentication_Root_R46.pem, SSL.com_Root_Certification_Authority_ECC.pem, 0bf05006.0, 88950faa.0, 9046744a.0
Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 
10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 10:53:25 crc 
restorecon[4687]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to
system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc 
restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 10:53:25 crc restorecon[4687]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 10:53:25 crc restorecon[4687]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 10:53:25 crc restorecon[4687]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Feb 02 10:53:26 crc kubenswrapper[4838]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Feb 02 10:53:26 crc kubenswrapper[4838]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Feb 02 10:53:26 crc kubenswrapper[4838]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Feb 02 10:53:26 crc kubenswrapper[4838]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
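[Editor's note] Several of the deprecation warnings above share one remedy: carry the value in the KubeletConfiguration file passed via --config instead of on the command line. As a minimal illustrative sketch (not a file taken from this cluster), the deprecated flags warned about here would map to config fields roughly as follows; the field names follow the upstream kubelet.config.k8s.io/v1beta1 schema, and the values are copied from the FLAG dump recorded later in this log. Emitting JSON is sufficient because a JSON document is valid YAML.

# Hypothetical translation of the deprecated kubelet flags above into
# KubeletConfiguration fields (kubelet.config.k8s.io/v1beta1 schema).
import json

kubelet_config = {
    "apiVersion": "kubelet.config.k8s.io/v1beta1",
    "kind": "KubeletConfiguration",
    # --container-runtime-endpoint (value from the FLAG dump below)
    "containerRuntimeEndpoint": "/var/run/crio/crio.sock",
    # --volume-plugin-dir
    "volumePluginDir": "/etc/kubernetes/kubelet-plugins/volume/exec",
    # --register-with-taints
    "registerWithTaints": [
        {"key": "node-role.kubernetes.io/master", "effect": "NoSchedule"}
    ],
    # --system-reserved
    "systemReserved": {"cpu": "200m", "ephemeral-storage": "350Mi", "memory": "350Mi"},
}

# Print a document usable as the file named by the kubelet's --config flag.
print(json.dumps(kubelet_config, indent=2))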
Feb 02 10:53:26 crc kubenswrapper[4838]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Feb 02 10:53:26 crc kubenswrapper[4838]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.218691 4838 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224184 4838 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224218 4838 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224230 4838 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224241 4838 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224250 4838 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224261 4838 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224270 4838 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224278 4838 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224286 4838 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224295 4838 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224303 4838 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224311 4838 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224319 4838 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224327 4838 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224338 4838 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224348 4838 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224356 4838 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224364 4838 feature_gate.go:330] unrecognized feature gate: PinnedImages
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224372 4838 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224380 4838 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224388 4838 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224396 4838 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224404 4838 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224412 4838 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224419 4838 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224427 4838 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224435 4838 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224442 4838 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224450 4838 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224472 4838 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224480 4838 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224488 4838 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224496 4838 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224504 4838 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224512 4838 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224519 4838 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224527 4838 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224535 4838 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224544 4838 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224552 4838 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224561 4838 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224568 4838 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224576 4838 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224583 4838 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224592 4838 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224600 4838 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224608 4838 feature_gate.go:330] unrecognized feature gate: OVNObservability
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224649 4838 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224657 4838 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224665 4838 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224677 4838 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224687 4838 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224696 4838 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224704 4838 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224713 4838 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224721 4838 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224729 4838 feature_gate.go:330] unrecognized feature gate: SignatureStores
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224737 4838 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224745 4838 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224753 4838 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224761 4838 feature_gate.go:330] unrecognized feature gate: Example
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224769 4838 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224779 4838 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224789 4838 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224796 4838 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224804 4838 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224812 4838 feature_gate.go:330] unrecognized feature gate: NewOLM
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224820 4838 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224830 4838 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224838 4838 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.224845 4838 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.225772 4838 flags.go:64] FLAG: --address="0.0.0.0"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.225794 4838 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.225820 4838 flags.go:64] FLAG: --anonymous-auth="true"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.225831 4838 flags.go:64] FLAG: --application-metrics-count-limit="100"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.225843 4838 flags.go:64] FLAG: --authentication-token-webhook="false"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.225853 4838 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.225865 4838 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.225877 4838 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.225887 4838 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.225897 4838 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.225907 4838 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.225917 4838 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.225926 4838 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.225936 4838 flags.go:64] FLAG: --cgroup-root=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.225944 4838 flags.go:64] FLAG: --cgroups-per-qos="true"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.225954 4838 flags.go:64] FLAG: --client-ca-file=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.225963 4838 flags.go:64] FLAG: --cloud-config=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.225972 4838 flags.go:64] FLAG: --cloud-provider=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.225980 4838 flags.go:64] FLAG: --cluster-dns="[]"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.225999 4838 flags.go:64] FLAG: --cluster-domain=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226007 4838 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226017 4838 flags.go:64] FLAG: --config-dir=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226026 4838 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226035 4838 flags.go:64] FLAG: --container-log-max-files="5"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226055 4838 flags.go:64] FLAG: --container-log-max-size="10Mi"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226065 4838 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226074 4838 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226084 4838 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226093 4838 flags.go:64] FLAG: --contention-profiling="false"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226102 4838 flags.go:64] FLAG: --cpu-cfs-quota="true"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226111 4838 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226121 4838 flags.go:64] FLAG: --cpu-manager-policy="none"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226130 4838 flags.go:64] FLAG: --cpu-manager-policy-options=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226142 4838 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226153 4838 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226164 4838 flags.go:64] FLAG: --enable-debugging-handlers="true"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226179 4838 flags.go:64] FLAG: --enable-load-reader="false"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226203 4838 flags.go:64] FLAG: --enable-server="true"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226215 4838 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226242 4838 flags.go:64] FLAG: --event-burst="100"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226271 4838 flags.go:64] FLAG: --event-qps="50"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226283 4838 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226294 4838 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226302 4838 flags.go:64] FLAG: --eviction-hard=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226314 4838 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226323 4838 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226333 4838 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226343 4838 flags.go:64] FLAG: --eviction-soft=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226352 4838 flags.go:64] FLAG: --eviction-soft-grace-period=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226360 4838 flags.go:64] FLAG: --exit-on-lock-contention="false"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226369 4838 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226377 4838 flags.go:64] FLAG: --experimental-mounter-path=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226386 4838 flags.go:64] FLAG: --fail-cgroupv1="false"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226395 4838 flags.go:64] FLAG: --fail-swap-on="true"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226404 4838 flags.go:64] FLAG: --feature-gates=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226414 4838 flags.go:64] FLAG: --file-check-frequency="20s"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226423 4838 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226433 4838 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226442 4838 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226451 4838 flags.go:64] FLAG: --healthz-port="10248"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226460 4838 flags.go:64] FLAG: --help="false"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226468 4838 flags.go:64] FLAG: --hostname-override=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226477 4838 flags.go:64] FLAG: --housekeeping-interval="10s"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226487 4838 flags.go:64] FLAG: --http-check-frequency="20s"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226496 4838 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226504 4838 flags.go:64] FLAG: --image-credential-provider-config=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226513 4838 flags.go:64] FLAG: --image-gc-high-threshold="85"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226522 4838 flags.go:64] FLAG: --image-gc-low-threshold="80"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226531 4838 flags.go:64] FLAG: --image-service-endpoint=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226540 4838 flags.go:64] FLAG: --kernel-memcg-notification="false"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226548 4838 flags.go:64] FLAG: --kube-api-burst="100"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226557 4838 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226567 4838 flags.go:64] FLAG: --kube-api-qps="50"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226576 4838 flags.go:64] FLAG: --kube-reserved=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226584 4838 flags.go:64] FLAG: --kube-reserved-cgroup=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226593 4838 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226665 4838 flags.go:64] FLAG: --kubelet-cgroups=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226675 4838 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226685 4838 flags.go:64] FLAG: --lock-file=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226696 4838 flags.go:64] FLAG: --log-cadvisor-usage="false"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226705 4838 flags.go:64] FLAG: --log-flush-frequency="5s"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226714 4838 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226727 4838 flags.go:64] FLAG: --log-json-split-stream="false"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226735 4838 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226745 4838 flags.go:64] FLAG: --log-text-split-stream="false"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226754 4838 flags.go:64] FLAG: --logging-format="text"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226762 4838 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226772 4838 flags.go:64] FLAG: --make-iptables-util-chains="true"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226780 4838 flags.go:64] FLAG: --manifest-url=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226790 4838 flags.go:64] FLAG: --manifest-url-header=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226802 4838 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226810 4838 flags.go:64] FLAG: --max-open-files="1000000"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226821 4838 flags.go:64] FLAG: --max-pods="110"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226830 4838 flags.go:64] FLAG: --maximum-dead-containers="-1"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226839 4838 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226848 4838 flags.go:64] FLAG: --memory-manager-policy="None"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226857 4838 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226866 4838 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226875 4838 flags.go:64] FLAG: --node-ip="192.168.126.11"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226884 4838 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226903 4838 flags.go:64] FLAG: --node-status-max-images="50"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226912 4838 flags.go:64] FLAG: --node-status-update-frequency="10s"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226921 4838 flags.go:64] FLAG: --oom-score-adj="-999"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226930 4838 flags.go:64] FLAG: --pod-cidr=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226938 4838 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226952 4838 flags.go:64] FLAG: --pod-manifest-path=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226961 4838 flags.go:64] FLAG: --pod-max-pids="-1"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226969 4838 flags.go:64] FLAG: --pods-per-core="0"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226978 4838 flags.go:64] FLAG: --port="10250"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226987 4838 flags.go:64] FLAG: --protect-kernel-defaults="false"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.226997 4838 flags.go:64] FLAG: --provider-id=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227006 4838 flags.go:64] FLAG: --qos-reserved=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227028 4838 flags.go:64] FLAG: --read-only-port="10255"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227038 4838 flags.go:64] FLAG: --register-node="true"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227046 4838 flags.go:64] FLAG: --register-schedulable="true"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227055 4838 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227070 4838 flags.go:64] FLAG: --registry-burst="10"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227079 4838 flags.go:64] FLAG: --registry-qps="5"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227088 4838 flags.go:64] FLAG: --reserved-cpus=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227096 4838 flags.go:64] FLAG: --reserved-memory=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227107 4838 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227116 4838 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227125 4838 flags.go:64] FLAG: --rotate-certificates="false"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227134 4838 flags.go:64] FLAG: --rotate-server-certificates="false"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227142 4838 flags.go:64] FLAG: --runonce="false"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227151 4838 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227161 4838 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227171 4838 flags.go:64] FLAG: --seccomp-default="false"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227180 4838 flags.go:64] FLAG: --serialize-image-pulls="true"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227189 4838 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227199 4838 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227208 4838 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227217 4838 flags.go:64] FLAG: --storage-driver-password="root"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227226 4838 flags.go:64] FLAG: --storage-driver-secure="false"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227235 4838 flags.go:64] FLAG: --storage-driver-table="stats"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227243 4838 flags.go:64] FLAG: --storage-driver-user="root"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227253 4838 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227262 4838 flags.go:64] FLAG: --sync-frequency="1m0s"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227271 4838 flags.go:64] FLAG: --system-cgroups=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227280 4838 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227293 4838 flags.go:64] FLAG: --system-reserved-cgroup=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227302 4838 flags.go:64] FLAG: --tls-cert-file=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227311 4838 flags.go:64] FLAG: --tls-cipher-suites="[]"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227329 4838 flags.go:64] FLAG: --tls-min-version=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227338 4838 flags.go:64] FLAG: --tls-private-key-file=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227348 4838 flags.go:64] FLAG: --topology-manager-policy="none"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227357 4838 flags.go:64] FLAG: --topology-manager-policy-options=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227366 4838 flags.go:64] FLAG: --topology-manager-scope="container"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227387 4838 flags.go:64] FLAG: --v="2"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227399 4838 flags.go:64] FLAG: --version="false"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227410 4838 flags.go:64] FLAG: --vmodule=""
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227421 4838 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.227430 4838 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227721 4838 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227733 4838 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227742 4838 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227752 4838 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227761 4838 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227769 4838 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227778 4838 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227786 4838 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227794 4838 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227803 4838 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227810 4838 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227818 4838 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227826 4838 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227834 4838 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227842 4838 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227849 4838 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227857 4838 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227865 4838 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227873 4838 feature_gate.go:330] unrecognized feature gate: SignatureStores
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227881 4838 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227889 4838 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227896 4838 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227904 4838 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227915 4838 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227924 4838 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227932 4838 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227940 4838 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227948 4838 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227955 4838 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227963 4838 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227972 4838 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.227998 4838 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228006 4838 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228014 4838 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228022 4838 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228030 4838 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228038 4838 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228045 4838 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228053 4838 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228060 4838 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228068 4838 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228076 4838 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228083 4838 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228091 4838 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228099 4838 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228107 4838 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228115 4838 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228125 4838 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228135 4838 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228143 4838 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228152 4838 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228161 4838 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228169 4838 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228178 4838 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228185 4838 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228197 4838 feature_gate.go:330] unrecognized feature gate: OVNObservability
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228205 4838 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228213 4838 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228221 4838 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228229 4838 feature_gate.go:330] unrecognized feature gate: Example
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228239 4838 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228248 4838 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228257 4838 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228265 4838 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228275 4838 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228284 4838 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228294 4838 feature_gate.go:330] unrecognized feature gate: NewOLM
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228315 4838 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228324 4838 feature_gate.go:330] unrecognized feature gate: PinnedImages
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228332 4838 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.228341 4838 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.229284 4838 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.242515 4838 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.242573 4838 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.242742 4838 feature_gate.go:330] unrecognized feature gate: PinnedImages
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.242765 4838 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.242774 4838 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.242783 4838 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.242791 4838 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.242799 4838 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.242809 4838 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.242824 4838 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.242834 4838 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.242843 4838 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.242851 4838 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.242860 4838 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.242869 4838 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.242877 4838 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.242885 4838 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.242892 4838 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.242900 4838 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.242910 4838 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.242920 4838 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.242930 4838 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.242939 4838 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.242949 4838 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.242959 4838 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.242967 4838 feature_gate.go:330] unrecognized feature gate: OVNObservability
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.242997 4838 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243005 4838 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243013 4838 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243021 4838 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243029 4838 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243037 4838 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243045 4838 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243052 4838 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243060 4838 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243068 4838 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243080 4838 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243088 4838 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243096 4838 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243104 4838 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243112 4838 feature_gate.go:330] unrecognized feature gate: Example
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243119 4838 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243127 4838 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243134 4838 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243141 4838 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243149 4838 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243160 4838 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243170 4838 feature_gate.go:330] unrecognized feature gate: NewOLM
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243179 4838 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243189 4838 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243199 4838 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243207 4838 feature_gate.go:330] unrecognized feature gate: SignatureStores
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243215 4838 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243223 4838 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243231 4838 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243239 4838 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243249 4838 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243258 4838 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243267 4838 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243275 4838 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243283 4838 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243290 4838 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243298 4838 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243306 4838 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243313 4838 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243321 4838 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243328 4838 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243336 4838 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243343 4838 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243351 4838 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243358 4838 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243366 4838 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243375 4838 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.243388 4838 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243677 4838 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243692 4838 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243701 4838 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243710 4838 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243718 4838 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243727 4838 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243762 4838 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243772 4838 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243779 4838 feature_gate.go:330] unrecognized feature gate: Example
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243788 4838 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243796 4838 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243807 4838 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243818 4838 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243827 4838 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243836 4838 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243844 4838 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243854 4838 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243862 4838 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243871 4838 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243878 4838 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243886 4838 feature_gate.go:330] unrecognized feature gate: SignatureStores
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243893 4838 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243901 4838 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243909 4838 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243917 4838 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243924 4838 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243931 4838 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243939 4838 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243947 4838 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243954 4838 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243962 4838 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243969 4838 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243977 4838 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243985 4838 feature_gate.go:330] unrecognized feature gate: NewOLM
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.243993 4838 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244001 4838 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244009 4838 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244019 4838 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244029 4838 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244038 4838 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244046 4838 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244054 4838 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244062 4838 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244070 4838 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244077 4838 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244086 4838 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244093 4838 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244101 4838 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244109 4838 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244117 4838 feature_gate.go:330] unrecognized feature gate: OVNObservability
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244124 4838 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244132 4838 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244140 4838 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244147 4838 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244155 4838 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244165 4838 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true.
It will be removed in a future release. Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244175 4838 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244183 4838 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244191 4838 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244199 4838 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244207 4838 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244215 4838 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244223 4838 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244230 4838 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244238 4838 feature_gate.go:330] unrecognized feature gate: GatewayAPI Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244246 4838 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244253 4838 feature_gate.go:330] unrecognized feature gate: PinnedImages Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244263 4838 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244271 4838 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244279 4838 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.244289 4838 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.244300 4838 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.244558 4838 server.go:940] "Client rotation is on, will bootstrap in background" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.250248 4838 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.250368 4838 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
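Editor's note: the two feature_gate.go:386 summary lines above show the effective feature-gate map after both parsing passes. Below is a minimal sketch, assuming the upstream k8s.io/component-base/featuregate package, of how such a map is registered and then overridden; the gate names and defaults are illustrative, not the kubelet's actual registration code.

// featuregate_sketch.go - illustrative only, not kubelet source.
package main

import (
	"fmt"

	"k8s.io/component-base/featuregate"
)

func main() {
	gates := featuregate.NewFeatureGate()
	// Register a couple of the gates seen in the log with assumed defaults.
	if err := gates.Add(map[featuregate.Feature]featuregate.FeatureSpec{
		"NodeSwap": {Default: false, PreRelease: featuregate.Beta},
		"KMSv1":    {Default: false, PreRelease: featuregate.Deprecated},
	}); err != nil {
		panic(err)
	}
	// Apply overrides, as the kubelet does from its configuration. Upstream
	// SetFromMap rejects names that were never registered, which is consistent
	// with the feature_gate.go:330 warnings above: OpenShift-specific gates are
	// known only to the wrapper that logs them, not to the upstream gate set.
	if err := gates.SetFromMap(map[string]bool{"KMSv1": true}); err != nil {
		panic(err)
	}
	fmt.Println("KMSv1 enabled:", gates.Enabled("KMSv1")) // prints: true
}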
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.252384 4838 server.go:997] "Starting client certificate rotation" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.252427 4838 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.252725 4838 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-31 10:54:10.0794797 +0000 UTC Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.252873 4838 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.277411 4838 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.281028 4838 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Feb 02 10:53:26 crc kubenswrapper[4838]: E0202 10:53:26.281945 4838 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.241:6443: connect: connection refused" logger="UnhandledError" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.299229 4838 log.go:25] "Validated CRI v1 runtime API" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.341327 4838 log.go:25] "Validated CRI v1 image API" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.344135 4838 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.349834 4838 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2026-02-02-10-49-07-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.349891 4838 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:44 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:42 fsType:tmpfs blockSize:0}] Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.380708 4838 manager.go:217] Machine: {Timestamp:2026-02-02 10:53:26.374372816 +0000 UTC m=+0.711473914 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:33893a6d-b6bd-46d3-8543-3002098168f9 BootID:6d1a8705-67dc-4d3e-99d4-016e519e43da Filesystems:[{Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:44 
Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:42 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:1a:94:c2 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:1a:94:c2 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:20:d9:f1 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:ae:10:e4 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:bd:1b:b4 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:fa:29:61 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:7a:4d:03:f4:c1:d7 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:8e:e5:f0:8e:86:4a Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 
Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.381119 4838 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.381297 4838 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.390308 4838 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.391359 4838 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.391424 4838 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.391878 4838 topology_manager.go:138] "Creating topology manager with none policy" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.391898 4838 
container_manager_linux.go:303] "Creating device plugin manager" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.392580 4838 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.392665 4838 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.392971 4838 state_mem.go:36] "Initialized new in-memory state store" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.393108 4838 server.go:1245] "Using root directory" path="/var/lib/kubelet" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.410898 4838 kubelet.go:418] "Attempting to sync node with API server" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.410952 4838 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.410982 4838 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.411003 4838 kubelet.go:324] "Adding apiserver pod source" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.411022 4838 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.416532 4838 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.417969 4838 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.419850 4838 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.420832 4838 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.241:6443: connect: connection refused Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.420869 4838 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.241:6443: connect: connection refused Feb 02 10:53:26 crc kubenswrapper[4838]: E0202 10:53:26.420945 4838 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.241:6443: connect: connection refused" logger="UnhandledError" Feb 02 10:53:26 crc kubenswrapper[4838]: E0202 10:53:26.420984 4838 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.241:6443: connect: connection refused" logger="UnhandledError" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.421523 4838 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Feb 02 10:53:26 crc 
kubenswrapper[4838]: I0202 10:53:26.421566 4838 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.421581 4838 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.421595 4838 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.421648 4838 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.421663 4838 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.421677 4838 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.421698 4838 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.421715 4838 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.421731 4838 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.421757 4838 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.421770 4838 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.422701 4838 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.423332 4838 server.go:1280] "Started kubelet"
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.423658 4838 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.424395 4838 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.241:6443: connect: connection refused
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.424214 4838 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.424833 4838 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Feb 02 10:53:26 crc systemd[1]: Started Kubernetes Kubelet.
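Editor's note: the Container Manager NodeConfig logged above carries the hard-eviction thresholds (memory.available with Quantity 100Mi, nodefs.available with Percentage 0.1, and so on). The following is a minimal, self-contained sketch of how such Signal/Value pairs could be evaluated; the threshold type and the shouldEvict helper are hypothetical illustrations, not kubelet internals.

// eviction_sketch.go - illustrative only, not kubelet source.
package main

import "fmt"

// threshold mirrors one Signal/Value pair from HardEvictionThresholds: a
// threshold is either an absolute quantity (bytes) or a fraction of capacity.
type threshold struct {
	signal        string
	quantityBytes int64   // used when Quantity is set, e.g. "100Mi"
	percentage    float64 // used when Percentage is set, e.g. 0.1
}

// shouldEvict reports whether the observed available amount for a signal has
// fallen below its configured threshold.
func shouldEvict(available, capacity int64, t threshold) bool {
	if t.quantityBytes > 0 {
		return available < t.quantityBytes
	}
	return float64(available) < t.percentage*float64(capacity)
}

func main() {
	// memory.available: {Quantity: "100Mi"} from the logged config.
	mem := threshold{signal: "memory.available", quantityBytes: 100 << 20}
	// nodefs.available: {Percentage: 0.1} from the logged config.
	disk := threshold{signal: "nodefs.available", percentage: 0.1}

	fmt.Println(shouldEvict(80<<20, 0, mem))            // true: 80Mi < 100Mi
	fmt.Println(shouldEvict(10<<30, 85292941312, disk)) // false: ~10Gi > 10% of the ~85GB /var filesystem above
}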
Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.433763 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.433819 4838 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.434124 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 18:34:08.54733205 +0000 UTC Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.434777 4838 volume_manager.go:287] "The desired_state_of_world populator starts" Feb 02 10:53:26 crc kubenswrapper[4838]: E0202 10:53:26.435037 4838 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.435215 4838 volume_manager.go:289] "Starting Kubelet Volume Manager" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.434959 4838 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.436862 4838 factory.go:55] Registering systemd factory Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.441791 4838 factory.go:221] Registration of the systemd container factory successfully Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.437226 4838 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.241:6443: connect: connection refused Feb 02 10:53:26 crc kubenswrapper[4838]: E0202 10:53:26.441936 4838 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.241:6443: connect: connection refused" logger="UnhandledError" Feb 02 10:53:26 crc kubenswrapper[4838]: E0202 10:53:26.437547 4838 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.241:6443: connect: connection refused" interval="200ms" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.449016 4838 factory.go:153] Registering CRI-O factory Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.449049 4838 factory.go:221] Registration of the crio container factory successfully Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.449163 4838 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.449197 4838 factory.go:103] Registering Raw factory Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.449223 4838 manager.go:1196] Started watching for new ooms in manager Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.450209 4838 manager.go:319] Starting recovery of all containers Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.450263 4838 server.go:460] "Adding debug handlers to kubelet server" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.458473 4838 reconstruct.go:130] "Volume is 
marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.458576 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.458603 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.458650 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.458673 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.458693 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.458712 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.458734 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.458757 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.458776 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.458796 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.458846 4838 reconstruct.go:130] "Volume is marked as uncertain and 
added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.458864 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.458889 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.458907 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.458926 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.458943 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.458963 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.458980 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.458998 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459016 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459037 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459054 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459072 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459090 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459109 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459130 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459150 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459168 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459186 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459206 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459224 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459241 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459261 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459279 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459296 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459315 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459334 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459351 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459368 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459386 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459403 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459420 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459439 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459455 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459473 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459492 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459511 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459574 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459597 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459674 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459704 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459733 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459755 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459776 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459797 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459818 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459836 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459856 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459876 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459896 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459914 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459938 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459959 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459976 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.459994 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.460011 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" 
volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.460029 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.460047 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.460067 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.460085 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: E0202 10:53:26.453073 4838 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.241:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.1890688c8cc8f23c default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-02 10:53:26.423294524 +0000 UTC m=+0.760395592,LastTimestamp:2026-02-02 10:53:26.423294524 +0000 UTC m=+0.760395592,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.469818 4838 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.470164 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.470400 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.470607 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.470906 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.471106 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.471279 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.471480 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.471664 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.476411 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.476777 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.477391 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.477769 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.477983 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.478253 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" 
volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.478463 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.478732 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.479002 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.479209 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.479417 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.479654 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.479891 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.480084 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.480280 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.480436 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.480700 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.480925 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.481111 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.481325 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.481519 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.481734 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.481956 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.482165 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.482388 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.482688 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.482945 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.483358 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" 
seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.483485 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.483563 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.483606 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.483660 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.483692 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.483718 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.483752 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.483774 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.483801 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.483823 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.483853 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" 
seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.483874 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.483895 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.483924 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.483981 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.484007 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.484028 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.484049 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.484076 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.484097 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.484133 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.484153 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" 
seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.484173 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.484201 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.484224 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.484253 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.484280 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.484300 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.484327 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.484351 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.484379 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485089 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485172 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" 
seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485202 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485231 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485251 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485277 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485306 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485330 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485352 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485373 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485397 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485423 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485447 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" 
seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485471 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485494 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485514 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485535 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485555 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485576 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485598 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485649 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485670 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485691 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485711 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" 
seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485732 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485752 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485772 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485793 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485813 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485834 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485854 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485875 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485896 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485922 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485943 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Feb 02 10:53:26 crc 
kubenswrapper[4838]: I0202 10:53:26.485963 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.485983 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486004 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486025 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486049 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486102 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486135 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486163 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486187 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486275 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486309 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486335 4838 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486363 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486386 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486434 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486474 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486507 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486528 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486558 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486578 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486612 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486676 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486703 4838 reconstruct.go:130] "Volume is 
marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486727 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486766 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486793 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486822 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486846 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486866 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486892 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486924 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486953 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.486980 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.487003 4838 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.487025 4838 reconstruct.go:97] "Volume reconstruction finished" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.487040 4838 reconciler.go:26] "Reconciler: start to sync state" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.493829 4838 manager.go:324] Recovery completed Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.502642 4838 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.504452 4838 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.504521 4838 status_manager.go:217] "Starting to sync pod status with apiserver" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.504577 4838 kubelet.go:2335] "Starting kubelet main sync loop" Feb 02 10:53:26 crc kubenswrapper[4838]: E0202 10:53:26.504742 4838 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.513944 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.515599 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.515650 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.515661 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.518282 4838 cpu_manager.go:225] "Starting CPU manager" policy="none" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.518297 4838 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.518318 4838 state_mem.go:36] "Initialized new in-memory state store" Feb 02 10:53:26 crc kubenswrapper[4838]: W0202 10:53:26.529004 4838 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.241:6443: connect: connection refused Feb 02 10:53:26 crc kubenswrapper[4838]: E0202 10:53:26.529074 4838 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.241:6443: connect: connection refused" logger="UnhandledError" Feb 02 10:53:26 crc kubenswrapper[4838]: E0202 10:53:26.535728 4838 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.569686 4838 policy_none.go:49] "None policy: Start" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.571651 4838 memory_manager.go:170] "Starting memorymanager" policy="None" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.571681 4838 
state_mem.go:35] "Initializing new in-memory state store" Feb 02 10:53:26 crc kubenswrapper[4838]: E0202 10:53:26.605824 4838 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Feb 02 10:53:26 crc kubenswrapper[4838]: E0202 10:53:26.636546 4838 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 02 10:53:26 crc kubenswrapper[4838]: E0202 10:53:26.645306 4838 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.241:6443: connect: connection refused" interval="400ms" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.645425 4838 manager.go:334] "Starting Device Plugin manager" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.645485 4838 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.645513 4838 server.go:79] "Starting device plugin registration server" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.646211 4838 eviction_manager.go:189] "Eviction manager: starting control loop" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.646242 4838 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.646489 4838 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.646652 4838 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.646674 4838 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Feb 02 10:53:26 crc kubenswrapper[4838]: E0202 10:53:26.660948 4838 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.747922 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.749422 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.749487 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.749506 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.749557 4838 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 02 10:53:26 crc kubenswrapper[4838]: E0202 10:53:26.750776 4838 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.241:6443: connect: connection refused" node="crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.806312 4838 kubelet.go:2421] "SyncLoop ADD" source="file" 
pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc"] Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.806515 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.808445 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.808515 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.808540 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.808854 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.809175 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.809236 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.810731 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.810794 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.810818 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.811080 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.811416 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.811503 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.812761 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.812813 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.812782 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.812864 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.812887 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.812835 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.813118 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.813157 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.813244 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.813768 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.813834 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.813886 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.817077 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.817125 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.817146 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.817174 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.817206 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.817242 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.818107 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.818155 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.819187 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.819229 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.819249 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.819251 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.822148 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.822402 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.824222 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.826140 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.826208 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.835407 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.835450 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.835464 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.892033 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.892076 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.892157 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.892205 4838 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.892230 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.892263 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.892297 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.892325 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.892357 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.892388 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.892417 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.892468 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.892514 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.892544 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.892571 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.951241 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.952915 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.952981 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.953006 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.953044 4838 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 02 10:53:26 crc kubenswrapper[4838]: E0202 10:53:26.953734 4838 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.241:6443: connect: connection refused" node="crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994110 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994196 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994234 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994304 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: 
\"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994338 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994373 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994406 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994444 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994448 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994536 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994543 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994567 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994598 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994613 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994658 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994694 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994706 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994742 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994752 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994796 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994807 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994839 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994841 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994871 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994894 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994856 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994412 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994959 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.994982 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 10:53:26 crc kubenswrapper[4838]: I0202 10:53:26.995060 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 10:53:27 crc kubenswrapper[4838]: E0202 10:53:27.046771 4838 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.241:6443: connect: connection refused" interval="800ms" Feb 02 10:53:27 crc kubenswrapper[4838]: I0202 10:53:27.152270 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:53:27 crc kubenswrapper[4838]: I0202 10:53:27.163583 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 10:53:27 crc kubenswrapper[4838]: I0202 10:53:27.185612 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 10:53:27 crc kubenswrapper[4838]: I0202 10:53:27.207444 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 10:53:27 crc kubenswrapper[4838]: I0202 10:53:27.218568 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Feb 02 10:53:27 crc kubenswrapper[4838]: W0202 10:53:27.225913 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-fddaca5956ae201fc65178ec5bdb5449d00c1de8f5aa8352ac9052b69399600f WatchSource:0}: Error finding container fddaca5956ae201fc65178ec5bdb5449d00c1de8f5aa8352ac9052b69399600f: Status 404 returned error can't find the container with id fddaca5956ae201fc65178ec5bdb5449d00c1de8f5aa8352ac9052b69399600f Feb 02 10:53:27 crc kubenswrapper[4838]: W0202 10:53:27.233972 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-960a36be742c8824e27735ed78155009ebae4f58d81a48384ecaab6dc4bbc1f0 WatchSource:0}: Error finding container 960a36be742c8824e27735ed78155009ebae4f58d81a48384ecaab6dc4bbc1f0: Status 404 returned error can't find the container with id 960a36be742c8824e27735ed78155009ebae4f58d81a48384ecaab6dc4bbc1f0 Feb 02 10:53:27 crc kubenswrapper[4838]: W0202 10:53:27.239882 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-58ac18528a1ae03e6abe09a1f00460b5e4532d7d2ce4e3965747185833adbe05 WatchSource:0}: Error finding container 58ac18528a1ae03e6abe09a1f00460b5e4532d7d2ce4e3965747185833adbe05: Status 404 returned error can't find the container with id 58ac18528a1ae03e6abe09a1f00460b5e4532d7d2ce4e3965747185833adbe05 Feb 02 10:53:27 crc kubenswrapper[4838]: W0202 10:53:27.248671 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-a7c126c993d83d4fa49240e90abfe7de296d1496a1b382b0fef5f4538c057b57 WatchSource:0}: Error finding container a7c126c993d83d4fa49240e90abfe7de296d1496a1b382b0fef5f4538c057b57: Status 404 returned error can't find the container with id a7c126c993d83d4fa49240e90abfe7de296d1496a1b382b0fef5f4538c057b57 Feb 02 10:53:27 crc kubenswrapper[4838]: W0202 10:53:27.252121 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-a52e1d7bbd5e7880978a342cf77c9875081dca18f1675ac651dd67ea97c5943d WatchSource:0}: Error finding container a52e1d7bbd5e7880978a342cf77c9875081dca18f1675ac651dd67ea97c5943d: Status 404 returned error can't find the container with id a52e1d7bbd5e7880978a342cf77c9875081dca18f1675ac651dd67ea97c5943d Feb 02 10:53:27 crc kubenswrapper[4838]: I0202 10:53:27.354844 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:27 crc kubenswrapper[4838]: I0202 10:53:27.356840 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:27 crc kubenswrapper[4838]: I0202 10:53:27.356916 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:27 crc kubenswrapper[4838]: I0202 10:53:27.356935 4838 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:27 crc kubenswrapper[4838]: I0202 10:53:27.356977 4838 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 02 10:53:27 crc kubenswrapper[4838]: E0202 10:53:27.357556 4838 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.241:6443: connect: connection refused" node="crc" Feb 02 10:53:27 crc kubenswrapper[4838]: W0202 10:53:27.378756 4838 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.241:6443: connect: connection refused Feb 02 10:53:27 crc kubenswrapper[4838]: E0202 10:53:27.378941 4838 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.241:6443: connect: connection refused" logger="UnhandledError" Feb 02 10:53:27 crc kubenswrapper[4838]: I0202 10:53:27.425277 4838 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.241:6443: connect: connection refused Feb 02 10:53:27 crc kubenswrapper[4838]: I0202 10:53:27.434306 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 17:31:52.999840733 +0000 UTC Feb 02 10:53:27 crc kubenswrapper[4838]: I0202 10:53:27.510514 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"fddaca5956ae201fc65178ec5bdb5449d00c1de8f5aa8352ac9052b69399600f"} Feb 02 10:53:27 crc kubenswrapper[4838]: I0202 10:53:27.512201 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a52e1d7bbd5e7880978a342cf77c9875081dca18f1675ac651dd67ea97c5943d"} Feb 02 10:53:27 crc kubenswrapper[4838]: I0202 10:53:27.514072 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"a7c126c993d83d4fa49240e90abfe7de296d1496a1b382b0fef5f4538c057b57"} Feb 02 10:53:27 crc kubenswrapper[4838]: I0202 10:53:27.517883 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"58ac18528a1ae03e6abe09a1f00460b5e4532d7d2ce4e3965747185833adbe05"} Feb 02 10:53:27 crc kubenswrapper[4838]: I0202 10:53:27.521112 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"960a36be742c8824e27735ed78155009ebae4f58d81a48384ecaab6dc4bbc1f0"} Feb 02 10:53:27 crc kubenswrapper[4838]: W0202 10:53:27.611180 4838 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get 
"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.241:6443: connect: connection refused Feb 02 10:53:27 crc kubenswrapper[4838]: E0202 10:53:27.611297 4838 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.241:6443: connect: connection refused" logger="UnhandledError" Feb 02 10:53:27 crc kubenswrapper[4838]: W0202 10:53:27.730214 4838 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.241:6443: connect: connection refused Feb 02 10:53:27 crc kubenswrapper[4838]: E0202 10:53:27.730296 4838 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.241:6443: connect: connection refused" logger="UnhandledError" Feb 02 10:53:27 crc kubenswrapper[4838]: E0202 10:53:27.848297 4838 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.241:6443: connect: connection refused" interval="1.6s" Feb 02 10:53:28 crc kubenswrapper[4838]: W0202 10:53:28.001259 4838 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.241:6443: connect: connection refused Feb 02 10:53:28 crc kubenswrapper[4838]: E0202 10:53:28.001368 4838 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.241:6443: connect: connection refused" logger="UnhandledError" Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.157717 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.159391 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.159448 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.159471 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.159515 4838 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 02 10:53:28 crc kubenswrapper[4838]: E0202 10:53:28.160105 4838 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.241:6443: connect: connection refused" node="crc" Feb 02 10:53:28 crc kubenswrapper[4838]: E0202 10:53:28.211859 4838 event.go:368] "Unable to write event (may retry after 
sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.241:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.1890688c8cc8f23c default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-02 10:53:26.423294524 +0000 UTC m=+0.760395592,LastTimestamp:2026-02-02 10:53:26.423294524 +0000 UTC m=+0.760395592,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.319820 4838 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Feb 02 10:53:28 crc kubenswrapper[4838]: E0202 10:53:28.320590 4838 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.241:6443: connect: connection refused" logger="UnhandledError" Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.425270 4838 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.241:6443: connect: connection refused Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.434680 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 03:51:40.683352437 +0000 UTC Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.526459 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590"} Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.528362 4838 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97" exitCode=0 Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.528416 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97"} Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.528492 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.529884 4838 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="705e10cc53e4009b6e79f999f4a645ef1e2c8208aa47d840bf07fa872a04bd1e" exitCode=0 Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.529946 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"705e10cc53e4009b6e79f999f4a645ef1e2c8208aa47d840bf07fa872a04bd1e"} Feb 02 10:53:28 crc 
kubenswrapper[4838]: I0202 10:53:28.529982 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.530330 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.530369 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.530383 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.531072 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.531104 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.531120 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.532100 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.532987 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.533027 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.533043 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.533048 4838 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="af5d50ad3a14334ecb762074791d8b64727d86e69438375b3067b12d4588d93b" exitCode=0 Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.533098 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"af5d50ad3a14334ecb762074791d8b64727d86e69438375b3067b12d4588d93b"} Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.533163 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.537036 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.537073 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.537089 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.539915 4838 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="51090d7e9383392f3daab78599cfd0aa3871d6d89d39dcf4c45af7441328b672" exitCode=0 Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.539968 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"51090d7e9383392f3daab78599cfd0aa3871d6d89d39dcf4c45af7441328b672"} Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.539996 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.541189 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.541232 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:28 crc kubenswrapper[4838]: I0202 10:53:28.541251 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.425411 4838 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.241:6443: connect: connection refused Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.434876 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 04:31:01.438616217 +0000 UTC Feb 02 10:53:29 crc kubenswrapper[4838]: E0202 10:53:29.449463 4838 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.241:6443: connect: connection refused" interval="3.2s" Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.546586 4838 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="b270f516e2d73dc3f592bfba7143744457adc7b72bca5c2abe2deafa1314aa4f" exitCode=0 Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.546702 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"b270f516e2d73dc3f592bfba7143744457adc7b72bca5c2abe2deafa1314aa4f"} Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.546870 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.548697 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.548737 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.548748 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.549105 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"385e32753a69517e12bf4bd0ceb5bbb13e6cd79cf0f7df5ef20dfd36f8f4bac0"} Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.549209 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.550051 4838 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.550069 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.550080 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.553770 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"74ee92ab7d9ad96ff606c9e549e3d99a3602fd91c10a4ec7ebeb07932825d521"} Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.553947 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9265e112e7858143bc9067c7d9b1d00cab82fc64a1e306c175d620d699c94a36"} Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.553979 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"1bae30f81dbb217dd45987bdd5ba01a01d13b8c602153c734f27336412c5a397"} Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.554079 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.555449 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.555487 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.555498 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.559121 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006"} Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.559165 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb"} Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.559181 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8"} Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.559260 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.560314 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.560343 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.560353 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.566317 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d"} Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.566345 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400"} Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.566356 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a"} Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.761299 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.766700 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.766747 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.766781 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:29 crc kubenswrapper[4838]: I0202 10:53:29.766814 4838 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 02 10:53:29 crc kubenswrapper[4838]: E0202 10:53:29.767320 4838 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.241:6443: connect: connection refused" node="crc" Feb 02 10:53:29 crc kubenswrapper[4838]: W0202 10:53:29.785157 4838 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.241:6443: connect: connection refused Feb 02 10:53:29 crc kubenswrapper[4838]: E0202 10:53:29.785223 4838 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.241:6443: connect: connection refused" logger="UnhandledError" Feb 02 10:53:30 crc kubenswrapper[4838]: W0202 10:53:30.233337 4838 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.241:6443: connect: connection refused Feb 02 10:53:30 crc kubenswrapper[4838]: E0202 10:53:30.233440 4838 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get 
\"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.241:6443: connect: connection refused" logger="UnhandledError" Feb 02 10:53:30 crc kubenswrapper[4838]: I0202 10:53:30.434999 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 04:14:31.026433842 +0000 UTC Feb 02 10:53:30 crc kubenswrapper[4838]: I0202 10:53:30.573190 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0"} Feb 02 10:53:30 crc kubenswrapper[4838]: I0202 10:53:30.573239 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06"} Feb 02 10:53:30 crc kubenswrapper[4838]: I0202 10:53:30.573320 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:30 crc kubenswrapper[4838]: I0202 10:53:30.574771 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:30 crc kubenswrapper[4838]: I0202 10:53:30.574817 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:30 crc kubenswrapper[4838]: I0202 10:53:30.574835 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:30 crc kubenswrapper[4838]: I0202 10:53:30.576790 4838 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="cb7ef8802965fb427ad66d09413efb6862bfcc9e270b614da17837b430ba32c6" exitCode=0 Feb 02 10:53:30 crc kubenswrapper[4838]: I0202 10:53:30.576872 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"cb7ef8802965fb427ad66d09413efb6862bfcc9e270b614da17837b430ba32c6"} Feb 02 10:53:30 crc kubenswrapper[4838]: I0202 10:53:30.576897 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:30 crc kubenswrapper[4838]: I0202 10:53:30.577144 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:30 crc kubenswrapper[4838]: I0202 10:53:30.577158 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:30 crc kubenswrapper[4838]: I0202 10:53:30.577292 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 10:53:30 crc kubenswrapper[4838]: I0202 10:53:30.577667 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:30 crc kubenswrapper[4838]: I0202 10:53:30.578291 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:30 crc kubenswrapper[4838]: I0202 10:53:30.578350 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:30 crc kubenswrapper[4838]: I0202 10:53:30.578368 4838 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:30 crc kubenswrapper[4838]: I0202 10:53:30.578489 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:30 crc kubenswrapper[4838]: I0202 10:53:30.578526 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:30 crc kubenswrapper[4838]: I0202 10:53:30.578526 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:30 crc kubenswrapper[4838]: I0202 10:53:30.578538 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:30 crc kubenswrapper[4838]: I0202 10:53:30.578558 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:30 crc kubenswrapper[4838]: I0202 10:53:30.578575 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:30 crc kubenswrapper[4838]: I0202 10:53:30.580307 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:30 crc kubenswrapper[4838]: I0202 10:53:30.580339 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:30 crc kubenswrapper[4838]: I0202 10:53:30.580356 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:31 crc kubenswrapper[4838]: I0202 10:53:31.435222 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 15:47:46.883982618 +0000 UTC Feb 02 10:53:31 crc kubenswrapper[4838]: I0202 10:53:31.585883 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7b68b5aaa1d9a1344a73ba84c68e737e7827e6f900dce380047a6c045eb1b174"} Feb 02 10:53:31 crc kubenswrapper[4838]: I0202 10:53:31.586014 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:31 crc kubenswrapper[4838]: I0202 10:53:31.586032 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:31 crc kubenswrapper[4838]: I0202 10:53:31.586076 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b15657beb3bdd7ab6d9c2ec0e7048979e1a9ac7424dcdd0044deb929b7e2724e"} Feb 02 10:53:31 crc kubenswrapper[4838]: I0202 10:53:31.586116 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"3b929ebdec56117e63a9dd12ea9ddbf006a31aa04a9646e5ca22a1f59928dd70"} Feb 02 10:53:31 crc kubenswrapper[4838]: I0202 10:53:31.586151 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:53:31 crc kubenswrapper[4838]: I0202 10:53:31.587565 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:31 crc kubenswrapper[4838]: I0202 10:53:31.587649 4838 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:31 crc kubenswrapper[4838]: I0202 10:53:31.587670 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:31 crc kubenswrapper[4838]: I0202 10:53:31.587688 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:31 crc kubenswrapper[4838]: I0202 10:53:31.587736 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:31 crc kubenswrapper[4838]: I0202 10:53:31.587777 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:31 crc kubenswrapper[4838]: I0202 10:53:31.978894 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 10:53:31 crc kubenswrapper[4838]: I0202 10:53:31.979438 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:31 crc kubenswrapper[4838]: I0202 10:53:31.981460 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:31 crc kubenswrapper[4838]: I0202 10:53:31.981525 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:31 crc kubenswrapper[4838]: I0202 10:53:31.981543 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:32 crc kubenswrapper[4838]: I0202 10:53:32.436307 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 19:10:56.772549965 +0000 UTC Feb 02 10:53:32 crc kubenswrapper[4838]: I0202 10:53:32.470688 4838 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Feb 02 10:53:32 crc kubenswrapper[4838]: I0202 10:53:32.596254 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:32 crc kubenswrapper[4838]: I0202 10:53:32.596246 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a7f61b6833337c5b93efcf7c46817c0d7c6ef29178a7ae1bf8c1990b58c87857"} Feb 02 10:53:32 crc kubenswrapper[4838]: I0202 10:53:32.596416 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"dcb5cc93caff3b7c35ed06e3990c04acacd9ec59f39b7a91d9c03484f85ef7a1"} Feb 02 10:53:32 crc kubenswrapper[4838]: I0202 10:53:32.596470 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:32 crc kubenswrapper[4838]: I0202 10:53:32.597728 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:32 crc kubenswrapper[4838]: I0202 10:53:32.597794 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:32 crc kubenswrapper[4838]: I0202 10:53:32.597813 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:32 crc 
kubenswrapper[4838]: I0202 10:53:32.598775 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:53:32 crc kubenswrapper[4838]: I0202 10:53:32.598833 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:53:32 crc kubenswrapper[4838]: I0202 10:53:32.598850 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:53:32 crc kubenswrapper[4838]: I0202 10:53:32.968290 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 10:53:32 crc kubenswrapper[4838]: I0202 10:53:32.969968 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:53:32 crc kubenswrapper[4838]: I0202 10:53:32.970069 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:53:32 crc kubenswrapper[4838]: I0202 10:53:32.970090 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:53:32 crc kubenswrapper[4838]: I0202 10:53:32.970128 4838 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Feb 02 10:53:33 crc kubenswrapper[4838]: I0202 10:53:33.308264 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 10:53:33 crc kubenswrapper[4838]: I0202 10:53:33.437110 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 05:44:23.580783574 +0000 UTC
Feb 02 10:53:33 crc kubenswrapper[4838]: I0202 10:53:33.549555 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 10:53:33 crc kubenswrapper[4838]: I0202 10:53:33.599703 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 10:53:33 crc kubenswrapper[4838]: I0202 10:53:33.599817 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 10:53:33 crc kubenswrapper[4838]: I0202 10:53:33.601150 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:53:33 crc kubenswrapper[4838]: I0202 10:53:33.601207 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:53:33 crc kubenswrapper[4838]: I0202 10:53:33.601225 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:53:33 crc kubenswrapper[4838]: I0202 10:53:33.601354 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:53:33 crc kubenswrapper[4838]: I0202 10:53:33.601429 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:53:33 crc kubenswrapper[4838]: I0202 10:53:33.601453 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:53:34 crc kubenswrapper[4838]: I0202 10:53:34.089267 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
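The kubelet-serving certificate_manager line repeats about once a second with the same expiration but a different rotation deadline each time. That is expected: client-go recomputes the deadline as a jittered point late in the certificate's validity window (roughly the 70-90% band) so that a fleet of kubelets does not all rotate at once. A sketch of that computation; treat the exact band and the assumed 90-day lifetime as illustrative, not values taken from this log:

    package main

    import (
    	"fmt"
    	"math/rand"
    	"time"
    )

    // rotationDeadline picks a random instant in roughly the 70%-90% band
    // of the certificate's lifetime. Recomputing it on each pass is why
    // the logged deadline jumps around while the expiration stays fixed.
    func rotationDeadline(notBefore, notAfter time.Time) time.Time {
    	total := notAfter.Sub(notBefore)
    	jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
    	return notBefore.Add(jittered)
    }

    func main() {
    	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC) // expiration from the log
    	notBefore := notAfter.Add(-90 * 24 * time.Hour)           // assumed 90-day lifetime
    	for i := 0; i < 3; i++ {
    		fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter))
    	}
    }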
Feb 02 10:53:34 crc kubenswrapper[4838]: I0202 10:53:34.437479 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 13:22:10.972703686 +0000 UTC
Feb 02 10:53:34 crc kubenswrapper[4838]: I0202 10:53:34.602601 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 10:53:34 crc kubenswrapper[4838]: I0202 10:53:34.602673 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 10:53:34 crc kubenswrapper[4838]: I0202 10:53:34.604298 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:53:34 crc kubenswrapper[4838]: I0202 10:53:34.604326 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:53:34 crc kubenswrapper[4838]: I0202 10:53:34.604338 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:53:34 crc kubenswrapper[4838]: I0202 10:53:34.604694 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:53:34 crc kubenswrapper[4838]: I0202 10:53:34.604783 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:53:34 crc kubenswrapper[4838]: I0202 10:53:34.604809 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:53:34 crc kubenswrapper[4838]: I0202 10:53:34.980734 4838 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Feb 02 10:53:34 crc kubenswrapper[4838]: I0202 10:53:34.980835 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Feb 02 10:53:35 crc kubenswrapper[4838]: I0202 10:53:35.327318 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 02 10:53:35 crc kubenswrapper[4838]: I0202 10:53:35.327510 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 10:53:35 crc kubenswrapper[4838]: I0202 10:53:35.329333 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:53:35 crc kubenswrapper[4838]: I0202 10:53:35.329399 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:53:35 crc kubenswrapper[4838]: I0202 10:53:35.329420 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:53:35 crc kubenswrapper[4838]: I0202 10:53:35.438450 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 23:53:04.047572202 +0000 UTC
Feb 02 10:53:36 crc kubenswrapper[4838]: I0202
10:53:36.439005 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 17:23:12.420755141 +0000 UTC Feb 02 10:53:36 crc kubenswrapper[4838]: E0202 10:53:36.661280 4838 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Feb 02 10:53:37 crc kubenswrapper[4838]: I0202 10:53:37.195740 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 10:53:37 crc kubenswrapper[4838]: I0202 10:53:37.195943 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:37 crc kubenswrapper[4838]: I0202 10:53:37.197533 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:37 crc kubenswrapper[4838]: I0202 10:53:37.197660 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:37 crc kubenswrapper[4838]: I0202 10:53:37.197681 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:37 crc kubenswrapper[4838]: I0202 10:53:37.202916 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 10:53:37 crc kubenswrapper[4838]: I0202 10:53:37.439598 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 18:47:34.618662521 +0000 UTC Feb 02 10:53:37 crc kubenswrapper[4838]: I0202 10:53:37.611890 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:37 crc kubenswrapper[4838]: I0202 10:53:37.612062 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 10:53:37 crc kubenswrapper[4838]: I0202 10:53:37.613430 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:37 crc kubenswrapper[4838]: I0202 10:53:37.613529 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:37 crc kubenswrapper[4838]: I0202 10:53:37.613552 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:38 crc kubenswrapper[4838]: I0202 10:53:38.324046 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 10:53:38 crc kubenswrapper[4838]: I0202 10:53:38.440744 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 10:43:45.120717766 +0000 UTC Feb 02 10:53:38 crc kubenswrapper[4838]: I0202 10:53:38.615587 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:38 crc kubenswrapper[4838]: I0202 10:53:38.617011 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:38 crc kubenswrapper[4838]: I0202 10:53:38.617078 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 10:53:38 crc kubenswrapper[4838]: I0202 10:53:38.617101 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:39 crc kubenswrapper[4838]: I0202 10:53:39.441129 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 11:53:07.796657287 +0000 UTC Feb 02 10:53:39 crc kubenswrapper[4838]: I0202 10:53:39.618075 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:39 crc kubenswrapper[4838]: I0202 10:53:39.619071 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:39 crc kubenswrapper[4838]: I0202 10:53:39.619114 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:39 crc kubenswrapper[4838]: I0202 10:53:39.619126 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:40 crc kubenswrapper[4838]: I0202 10:53:40.426798 4838 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Feb 02 10:53:40 crc kubenswrapper[4838]: I0202 10:53:40.441291 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 00:49:24.734976286 +0000 UTC Feb 02 10:53:40 crc kubenswrapper[4838]: W0202 10:53:40.479467 4838 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Feb 02 10:53:40 crc kubenswrapper[4838]: I0202 10:53:40.479597 4838 trace.go:236] Trace[243532734]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Feb-2026 10:53:30.478) (total time: 10001ms): Feb 02 10:53:40 crc kubenswrapper[4838]: Trace[243532734]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (10:53:40.479) Feb 02 10:53:40 crc kubenswrapper[4838]: Trace[243532734]: [10.001227668s] [10.001227668s] END Feb 02 10:53:40 crc kubenswrapper[4838]: E0202 10:53:40.479666 4838 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Feb 02 10:53:40 crc kubenswrapper[4838]: I0202 10:53:40.811038 4838 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Feb 02 10:53:40 crc kubenswrapper[4838]: I0202 10:53:40.811164 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Feb 02 10:53:40 crc kubenswrapper[4838]: I0202 10:53:40.823993 4838 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Feb 02 10:53:40 crc kubenswrapper[4838]: I0202 10:53:40.824076 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Feb 02 10:53:41 crc kubenswrapper[4838]: I0202 10:53:41.441815 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 03:36:06.96912471 +0000 UTC Feb 02 10:53:42 crc kubenswrapper[4838]: I0202 10:53:42.138033 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Feb 02 10:53:42 crc kubenswrapper[4838]: I0202 10:53:42.138272 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:42 crc kubenswrapper[4838]: I0202 10:53:42.139739 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:42 crc kubenswrapper[4838]: I0202 10:53:42.139790 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:42 crc kubenswrapper[4838]: I0202 10:53:42.139801 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:42 crc kubenswrapper[4838]: I0202 10:53:42.201048 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Feb 02 10:53:42 crc kubenswrapper[4838]: I0202 10:53:42.442964 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 07:28:20.393207301 +0000 UTC Feb 02 10:53:42 crc kubenswrapper[4838]: I0202 10:53:42.626932 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:42 crc kubenswrapper[4838]: I0202 10:53:42.628572 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:42 crc kubenswrapper[4838]: I0202 10:53:42.628657 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:42 crc kubenswrapper[4838]: I0202 10:53:42.628676 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:42 crc kubenswrapper[4838]: I0202 10:53:42.646288 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Feb 02 10:53:43 crc kubenswrapper[4838]: I0202 10:53:43.316772 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:53:43 crc kubenswrapper[4838]: I0202 10:53:43.317031 4838 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:43 crc kubenswrapper[4838]: I0202 10:53:43.318670 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:43 crc kubenswrapper[4838]: I0202 10:53:43.318734 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:43 crc kubenswrapper[4838]: I0202 10:53:43.318752 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:43 crc kubenswrapper[4838]: I0202 10:53:43.324409 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:53:43 crc kubenswrapper[4838]: I0202 10:53:43.443262 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 23:32:48.739264231 +0000 UTC Feb 02 10:53:43 crc kubenswrapper[4838]: I0202 10:53:43.628974 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:43 crc kubenswrapper[4838]: I0202 10:53:43.629042 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:43 crc kubenswrapper[4838]: I0202 10:53:43.630035 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:43 crc kubenswrapper[4838]: I0202 10:53:43.630085 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:43 crc kubenswrapper[4838]: I0202 10:53:43.630103 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:43 crc kubenswrapper[4838]: I0202 10:53:43.630117 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:43 crc kubenswrapper[4838]: I0202 10:53:43.630172 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:43 crc kubenswrapper[4838]: I0202 10:53:43.630198 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:44 crc kubenswrapper[4838]: I0202 10:53:44.443964 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 14:06:05.489548916 +0000 UTC Feb 02 10:53:44 crc kubenswrapper[4838]: I0202 10:53:44.980108 4838 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Feb 02 10:53:44 crc kubenswrapper[4838]: I0202 10:53:44.980181 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Feb 02 10:53:45 crc 
kubenswrapper[4838]: I0202 10:53:45.445653 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 04:33:02.059326525 +0000 UTC Feb 02 10:53:45 crc kubenswrapper[4838]: E0202 10:53:45.800827 4838 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Feb 02 10:53:45 crc kubenswrapper[4838]: I0202 10:53:45.803412 4838 trace.go:236] Trace[1348963952]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Feb-2026 10:53:34.293) (total time: 11509ms): Feb 02 10:53:45 crc kubenswrapper[4838]: Trace[1348963952]: ---"Objects listed" error: 11509ms (10:53:45.803) Feb 02 10:53:45 crc kubenswrapper[4838]: Trace[1348963952]: [11.509553466s] [11.509553466s] END Feb 02 10:53:45 crc kubenswrapper[4838]: I0202 10:53:45.803450 4838 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Feb 02 10:53:45 crc kubenswrapper[4838]: I0202 10:53:45.805513 4838 trace.go:236] Trace[1738574342]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Feb-2026 10:53:30.816) (total time: 14989ms): Feb 02 10:53:45 crc kubenswrapper[4838]: Trace[1738574342]: ---"Objects listed" error: 14988ms (10:53:45.804) Feb 02 10:53:45 crc kubenswrapper[4838]: Trace[1738574342]: [14.989441926s] [14.989441926s] END Feb 02 10:53:45 crc kubenswrapper[4838]: I0202 10:53:45.805546 4838 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Feb 02 10:53:45 crc kubenswrapper[4838]: E0202 10:53:45.806399 4838 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Feb 02 10:53:45 crc kubenswrapper[4838]: I0202 10:53:45.811757 4838 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Feb 02 10:53:45 crc kubenswrapper[4838]: I0202 10:53:45.813398 4838 trace.go:236] Trace[1305624570]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Feb-2026 10:53:35.133) (total time: 10679ms): Feb 02 10:53:45 crc kubenswrapper[4838]: Trace[1305624570]: ---"Objects listed" error: 10679ms (10:53:45.812) Feb 02 10:53:45 crc kubenswrapper[4838]: Trace[1305624570]: [10.679655442s] [10.679655442s] END Feb 02 10:53:45 crc kubenswrapper[4838]: I0202 10:53:45.813435 4838 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Feb 02 10:53:45 crc kubenswrapper[4838]: I0202 10:53:45.815325 4838 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Feb 02 10:53:46 crc kubenswrapper[4838]: I0202 10:53:46.108308 4838 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:57490->192.168.126.11:17697: read: connection reset by peer" start-of-body= Feb 02 10:53:46 crc kubenswrapper[4838]: I0202 10:53:46.108407 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" 
probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:57490->192.168.126.11:17697: read: connection reset by peer" Feb 02 10:53:46 crc kubenswrapper[4838]: I0202 10:53:46.108666 4838 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:57494->192.168.126.11:17697: read: connection reset by peer" start-of-body= Feb 02 10:53:46 crc kubenswrapper[4838]: I0202 10:53:46.108800 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:57494->192.168.126.11:17697: read: connection reset by peer" Feb 02 10:53:46 crc kubenswrapper[4838]: I0202 10:53:46.108894 4838 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Feb 02 10:53:46 crc kubenswrapper[4838]: I0202 10:53:46.108933 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Feb 02 10:53:46 crc kubenswrapper[4838]: I0202 10:53:46.446578 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 10:22:21.116299008 +0000 UTC Feb 02 10:53:46 crc kubenswrapper[4838]: I0202 10:53:46.449134 4838 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Feb 02 10:53:46 crc kubenswrapper[4838]: I0202 10:53:46.640090 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Feb 02 10:53:46 crc kubenswrapper[4838]: I0202 10:53:46.642864 4838 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0" exitCode=255 Feb 02 10:53:46 crc kubenswrapper[4838]: I0202 10:53:46.642926 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0"} Feb 02 10:53:46 crc kubenswrapper[4838]: I0202 10:53:46.672319 4838 scope.go:117] "RemoveContainer" containerID="7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.425865 4838 apiserver.go:52] "Watching apiserver" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.430731 4838 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.431236 4838 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-apiserver/kube-apiserver-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h"] Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.431709 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.431802 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:53:47 crc kubenswrapper[4838]: E0202 10:53:47.431882 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.431952 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:53:47 crc kubenswrapper[4838]: E0202 10:53:47.432039 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.432082 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.432569 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:53:47 crc kubenswrapper[4838]: E0202 10:53:47.432636 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.432542 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.434344 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.434945 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.435173 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.435395 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.435774 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.435980 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.436752 4838 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.438241 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.438400 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.438689 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.447947 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 10:00:21.876618872 +0000 UTC Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.479995 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The 
container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.500198 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.519036 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.521003 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.521189 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.521247 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.521296 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.521390 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.521485 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.522174 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 02 10:53:47 crc 
kubenswrapper[4838]: I0202 10:53:47.522484 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.522540 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.522584 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.522660 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.522704 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.522743 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.522787 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.522836 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.522880 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.522973 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.522994 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.523023 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.523071 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.523119 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.523162 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.523210 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.523257 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.523276 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.523303 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.523409 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.523459 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.523459 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.523493 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.523581 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.523613 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.523680 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.523715 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.523750 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod 
\"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.523783 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.523818 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.523850 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.523836 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.523881 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.523947 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.524004 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.524044 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.524050 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.524076 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.524154 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.522526 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.524647 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.524693 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.524720 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.524775 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.524846 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.524901 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.525099 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.525157 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.525210 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.525257 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.525304 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.525350 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.525396 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.525439 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.525480 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.525521 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.525564 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.525608 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.525689 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.525736 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.525780 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.525826 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.525874 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.525924 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.525968 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.526013 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.526057 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.526099 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.526142 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.526187 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.526232 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.526276 4838 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.526324 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.526369 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.526418 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.526462 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.526506 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.526556 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.526602 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.526683 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.526734 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 10:53:47 crc 
Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.531547 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.531598 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.533412 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.533480 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") "
Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.533532 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.533584 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.533663 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.533710 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") "
Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.533759 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.533813 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.524841 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.533865 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.533921 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.524961 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.524981 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.525015 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.525160 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.526100 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.526274 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.526568 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.527227 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.534098 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.527424 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.527879 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.528394 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.528409 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.528494 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.528683 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.528857 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.529071 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.529229 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.529243 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). 
InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.529598 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.530014 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.530231 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.530866 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.530897 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.531097 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.531190 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.532202 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.532245 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.532421 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.532774 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.533030 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.533438 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.533492 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.533817 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.534033 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.534732 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.534178 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.534720 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.534837 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.535199 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: E0202 10:53:47.535293 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:53:48.035275588 +0000 UTC m=+22.372376616 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.535437 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.533969 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.535674 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.535851 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.536054 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.536198 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.536455 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.536689 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.536770 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.536775 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.537251 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.537411 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.537449 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.537532 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.537674 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). 
InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.537702 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.537878 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.535718 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.538006 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.538063 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.538116 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.538163 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.538529 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.538566 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.538577 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.538768 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.538834 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.539039 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.539074 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.538762 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.539202 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.539355 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.539384 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.539439 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.539485 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.539606 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.539749 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.539807 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.539855 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.539900 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.539943 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.539990 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod 
\"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.540039 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.540085 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.540092 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.540136 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.540185 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.540233 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.540283 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.540330 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.540376 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.540428 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" 
(UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.540479 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.540530 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.540580 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.540662 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.540714 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.540762 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.540810 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.540836 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.540859 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.540907 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.540955 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.540980 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.540978 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.541155 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.541344 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.541382 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.541815 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.541901 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.542044 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.542062 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.542105 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.542239 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.542191 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.542316 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). 
InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.542322 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.542601 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.542502 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02
T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.542908 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.543018 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.543052 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.543169 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.543216 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.543430 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.544033 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.544207 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.544276 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.544344 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.544496 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.544776 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.544868 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.544925 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.544989 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.545035 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.545079 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.545124 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.545171 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: 
\"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.545569 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.545671 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.546754 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.546820 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.546874 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.546929 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.546998 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.547052 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.547103 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.547157 4838 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.547209 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.547244 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.547279 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.547331 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.547376 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.547421 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.547470 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.547518 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.547567 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.547614 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.547712 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.547771 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.547821 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.547868 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.547925 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.547973 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.548174 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.548249 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.548304 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.548355 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: 
\"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.548402 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.548451 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.548498 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.548546 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.548579 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.548610 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.548708 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.548756 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.548805 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.548853 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" 
(UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.548887 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.548921 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.548955 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.548987 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.549022 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.549244 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.549303 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.549355 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.549404 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.549453 4838 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.549489 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.549532 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.549662 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.549730 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.549767 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.549803 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.549841 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.549879 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.549915 4838 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.549956 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550004 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550058 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550110 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550164 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550219 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550268 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550407 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: 
\"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550442 4838 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550474 4838 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550501 4838 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550528 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550555 4838 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550580 4838 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550604 4838 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550668 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550696 4838 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550719 4838 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550745 4838 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550770 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550796 4838 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on 
node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550824 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550851 4838 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550883 4838 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550910 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550939 4838 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550967 4838 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.551026 4838 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.551055 4838 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.552828 4838 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.552864 4838 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.552895 4838 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.552923 4838 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.552950 4838 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: 
\"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.552982 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.553011 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.553043 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.553081 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.553109 4838 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.553137 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.553164 4838 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.553190 4838 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.553221 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.553252 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.553282 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.553305 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.553328 4838 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.553349 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.553369 4838 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.553390 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.553410 4838 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.553431 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.553445 4838 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.553789 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.555324 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.555834 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.553455 4838 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.540647 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" 
(OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.544717 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.544827 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550703 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550749 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550818 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.550962 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.558961 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.551211 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.551529 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.551792 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.551953 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.552104 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.552269 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.553027 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.553215 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.553441 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.553483 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.553498 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.553569 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.554166 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.554193 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: E0202 10:53:47.554331 4838 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 10:53:47 crc kubenswrapper[4838]: E0202 10:53:47.559393 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 10:53:48.059358003 +0000 UTC m=+22.396459081 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 10:53:47 crc kubenswrapper[4838]: E0202 10:53:47.554400 4838 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.560031 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: E0202 10:53:47.560469 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 10:53:48.060432061 +0000 UTC m=+22.397533129 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.554710 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.554814 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.555769 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.556075 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.557251 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.557756 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.557955 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.558738 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.558803 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.560704 4838 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.560752 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.560772 4838 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.560790 4838 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.560809 4838 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.560830 4838 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.560854 4838 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.560874 4838 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.560892 4838 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.560909 4838 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.560927 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.560944 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.560965 4838 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.561614 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.561701 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.562045 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.562228 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.562585 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.562699 4838 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.562839 4838 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.562861 4838 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.562882 4838 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.562906 4838 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.562927 4838 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.562947 4838 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.562967 4838 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.562986 4838 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563032 4838 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563052 4838 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563072 4838 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563090 4838 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563110 4838 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563128 4838 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563148 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563167 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563184 4838 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563203 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563221 4838 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563238 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563256 4838 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563273 4838 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563293 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563312 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563331 4838 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563349 4838 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563368 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563386 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563405 4838 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563423 4838 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563441 4838 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563461 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563479 4838 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563498 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563517 4838 reconciler_common.go:293] 
"Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.563542 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.566730 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.566765 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.566837 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.567214 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.567669 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.568069 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.568242 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.568571 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.569728 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.570800 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.579205 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.579257 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.580188 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.580285 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.580340 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.580916 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.581110 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.581665 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.581669 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.582104 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.582109 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.582702 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: E0202 10:53:47.584247 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 10:53:47 crc kubenswrapper[4838]: E0202 10:53:47.584276 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 10:53:47 crc kubenswrapper[4838]: E0202 10:53:47.584293 4838 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 10:53:47 crc kubenswrapper[4838]: E0202 10:53:47.584365 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 10:53:48.084341071 +0000 UTC m=+22.421442209 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.586088 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.586292 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.586701 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 10:53:47 crc kubenswrapper[4838]: E0202 10:53:47.588116 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 10:53:47 crc kubenswrapper[4838]: E0202 10:53:47.588146 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 10:53:47 crc kubenswrapper[4838]: E0202 10:53:47.588162 4838 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 10:53:47 crc kubenswrapper[4838]: E0202 10:53:47.588211 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 10:53:48.088195902 +0000 UTC m=+22.425296940 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.588208 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.594236 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.594367 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.595514 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.597587 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.597754 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.597760 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.597816 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.598874 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.598964 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.599209 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.599434 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.599397 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.599911 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.599788 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.600404 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.599934 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.600109 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.600653 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.600157 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.600200 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.600410 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.600526 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.600764 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.601302 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.601746 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.601900 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.602753 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.602875 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.604181 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.604389 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.604868 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.605136 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.605302 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.605709 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.606000 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.607313 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.610458 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.610721 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.610856 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.613552 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.626063 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.629728 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.636193 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.638162 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.646758 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.648124 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.650279 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740"} Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.650733 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.656652 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.664843 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.664884 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.664938 4838 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.664949 4838 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.664959 4838 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.664966 4838 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.664976 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.664985 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.664993 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665000 4838 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665008 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665003 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod 
\"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665054 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665016 4838 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665137 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665170 4838 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665203 4838 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665232 4838 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665465 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665508 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665534 4838 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665554 4838 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665571 4838 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665603 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" 
DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665658 4838 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665685 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665710 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665727 4838 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665743 4838 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665760 4838 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665777 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665794 4838 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665811 4838 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665828 4838 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665847 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665864 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665881 4838 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665898 
4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665915 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665931 4838 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665949 4838 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665965 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665981 4838 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.665998 4838 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666016 4838 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666032 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666049 4838 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666066 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666084 4838 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666100 4838 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666116 4838 reconciler_common.go:293] "Volume 
detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666133 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666152 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666172 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666193 4838 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666216 4838 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666240 4838 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666258 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666284 4838 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666309 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666332 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666356 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666381 4838 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666404 4838 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: 
\"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666426 4838 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666447 4838 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666468 4838 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666490 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666511 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666534 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666559 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666581 4838 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666603 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666659 4838 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666683 4838 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666706 4838 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666729 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Feb 02 
10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666751 4838 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666774 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666796 4838 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666817 4838 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666839 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666862 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666884 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666909 4838 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666936 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666964 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.666988 4838 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.667011 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.667034 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Feb 02 
10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.667057 4838 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.667080 4838 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.667102 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.667128 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.667151 4838 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.667177 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.667234 4838 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.667261 4838 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.667284 4838 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.667594 4838 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.672046 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.672080 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.668078 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.687374 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready 
status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.697937 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.733322 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.743920 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.755144 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.765861 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.776801 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.780865 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 10:53:47 crc kubenswrapper[4838]: I0202 10:53:47.811673 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.078485 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.078588 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.078639 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:53:48 crc kubenswrapper[4838]: E0202 10:53:48.078753 4838 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 10:53:48 crc kubenswrapper[4838]: E0202 10:53:48.078767 4838 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 10:53:48 crc kubenswrapper[4838]: E0202 10:53:48.078867 4838 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:53:49.078845869 +0000 UTC m=+23.415946907 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:53:48 crc kubenswrapper[4838]: E0202 10:53:48.078924 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 10:53:49.07888111 +0000 UTC m=+23.415982188 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 10:53:48 crc kubenswrapper[4838]: E0202 10:53:48.078980 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 10:53:49.078959222 +0000 UTC m=+23.416060360 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.180096 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.180201 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:53:48 crc kubenswrapper[4838]: E0202 10:53:48.180348 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 10:53:48 crc kubenswrapper[4838]: E0202 10:53:48.180371 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 10:53:48 crc kubenswrapper[4838]: E0202 10:53:48.180387 4838 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 10:53:48 crc kubenswrapper[4838]: E0202 10:53:48.180448 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 10:53:49.180428905 +0000 UTC m=+23.517529953 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 10:53:48 crc kubenswrapper[4838]: E0202 10:53:48.180936 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 10:53:48 crc kubenswrapper[4838]: E0202 10:53:48.180964 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 10:53:48 crc kubenswrapper[4838]: E0202 10:53:48.180979 4838 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 10:53:48 crc kubenswrapper[4838]: E0202 10:53:48.181024 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 10:53:49.181008991 +0000 UTC m=+23.518110039 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.448494 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 13:52:57.074149909 +0000 UTC Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.505290 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:53:48 crc kubenswrapper[4838]: E0202 10:53:48.505452 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.511608 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.512670 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.514870 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.516218 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.517449 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.518499 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.519812 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.520973 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.522230 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.523331 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.524366 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.528522 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.529717 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.531496 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.532544 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.534399 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.536004 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.537088 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.539117 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.540342 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.542348 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.543554 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.544521 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.546671 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.547585 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.550008 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.551421 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.553259 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" 
path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.554420 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.557372 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.558343 4838 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.558547 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.561996 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.563855 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.564697 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.567839 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.570060 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.571090 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.573267 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.574187 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.575288 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.576072 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" 
path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.577368 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.578212 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.579325 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.580038 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.581483 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.582647 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.583928 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.584676 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.585593 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.586923 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.587685 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.588857 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.653075 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"4d6c362506d94f01f7886b429beb13bd13f680ee3d8c7ce6ee378c43a0ed46c7"} Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.655450 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7"} Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.655483 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78"} Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.655497 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"5779af513660ed20e457fb4d4f5f90eb1fd143fcbf702f251d14555abbf2863e"} Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.656973 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d"} Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.657011 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"d9970082f409ce0c9b4626d005d36c9321eb2c5ad6a36fd2f1eb3470356b58b6"} Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.671880 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:48Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.688108 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:48Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.709895 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:48Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.729699 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:48Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.759271 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:48Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.777077 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:48Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.795787 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:48Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.812852 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:48Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.830893 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:48Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.851994 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:48Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.876910 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:48Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.895877 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:48Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.928802 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:48Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:48 crc kubenswrapper[4838]: I0202 10:53:48.949648 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:48Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:49 crc kubenswrapper[4838]: I0202 10:53:49.090262 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:53:49 crc kubenswrapper[4838]: I0202 10:53:49.090398 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:53:49 crc kubenswrapper[4838]: E0202 10:53:49.090490 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:53:51.090455281 +0000 UTC m=+25.427556349 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:53:49 crc kubenswrapper[4838]: E0202 10:53:49.090502 4838 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 10:53:49 crc kubenswrapper[4838]: E0202 10:53:49.090580 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 10:53:51.090566364 +0000 UTC m=+25.427667432 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 10:53:49 crc kubenswrapper[4838]: I0202 10:53:49.090645 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:53:49 crc kubenswrapper[4838]: E0202 10:53:49.090729 4838 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 10:53:49 crc kubenswrapper[4838]: E0202 10:53:49.090799 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 10:53:51.090780479 +0000 UTC m=+25.427881547 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 10:53:49 crc kubenswrapper[4838]: I0202 10:53:49.191719 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:53:49 crc kubenswrapper[4838]: I0202 10:53:49.191826 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:53:49 crc kubenswrapper[4838]: E0202 10:53:49.191990 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 10:53:49 crc kubenswrapper[4838]: E0202 10:53:49.192018 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 10:53:49 crc kubenswrapper[4838]: E0202 10:53:49.192039 4838 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 10:53:49 crc kubenswrapper[4838]: E0202 
10:53:49.192088 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 10:53:49 crc kubenswrapper[4838]: E0202 10:53:49.192143 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 10:53:49 crc kubenswrapper[4838]: E0202 10:53:49.192164 4838 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 10:53:49 crc kubenswrapper[4838]: E0202 10:53:49.192123 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 10:53:51.192100579 +0000 UTC m=+25.529201647 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 10:53:49 crc kubenswrapper[4838]: E0202 10:53:49.192270 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 10:53:51.192244492 +0000 UTC m=+25.529345550 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 10:53:49 crc kubenswrapper[4838]: I0202 10:53:49.449073 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 20:56:25.249864879 +0000 UTC Feb 02 10:53:49 crc kubenswrapper[4838]: I0202 10:53:49.505844 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:53:49 crc kubenswrapper[4838]: I0202 10:53:49.505849 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:53:49 crc kubenswrapper[4838]: E0202 10:53:49.505982 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:53:49 crc kubenswrapper[4838]: E0202 10:53:49.506089 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.147690 4838 csr.go:261] certificate signing request csr-gftfb is approved, waiting to be issued Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.198669 4838 csr.go:257] certificate signing request csr-gftfb is issued Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.449693 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 02:03:23.255966685 +0000 UTC Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.505438 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:53:50 crc kubenswrapper[4838]: E0202 10:53:50.505544 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.734498 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-mz9jt"] Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.734859 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-mz9jt" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.736973 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-ndxhv"] Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.737302 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.737396 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.738309 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.741307 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.741320 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.741391 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.741520 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.741689 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.744369 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.753128 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-xrkv9"] Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.753793 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-n7ctv"] Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.753970 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.754094 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" Feb 02 10:53:50 crc kubenswrapper[4838]: W0202 10:53:50.756806 4838 reflector.go:561] object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq": failed to list *v1.Secret: secrets "machine-config-daemon-dockercfg-r5tcq" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Feb 02 10:53:50 crc kubenswrapper[4838]: W0202 10:53:50.756827 4838 reflector.go:561] object-"openshift-machine-config-operator"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Feb 02 10:53:50 crc kubenswrapper[4838]: E0202 10:53:50.756853 4838 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"machine-config-daemon-dockercfg-r5tcq\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-config-daemon-dockercfg-r5tcq\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Feb 02 10:53:50 crc kubenswrapper[4838]: E0202 10:53:50.756881 4838 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.757038 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Feb 02 10:53:50 crc kubenswrapper[4838]: W0202 10:53:50.757237 4838 reflector.go:561] object-"openshift-machine-config-operator"/"kube-rbac-proxy": failed to list *v1.ConfigMap: configmaps "kube-rbac-proxy" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Feb 02 10:53:50 crc kubenswrapper[4838]: E0202 10:53:50.757285 4838 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"kube-rbac-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-rbac-proxy\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Feb 02 10:53:50 crc kubenswrapper[4838]: W0202 10:53:50.757633 4838 reflector.go:561] object-"openshift-machine-config-operator"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Feb 02 10:53:50 crc kubenswrapper[4838]: E0202 
10:53:50.757654 4838 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Feb 02 10:53:50 crc kubenswrapper[4838]: W0202 10:53:50.757772 4838 reflector.go:561] object-"openshift-machine-config-operator"/"proxy-tls": failed to list *v1.Secret: secrets "proxy-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Feb 02 10:53:50 crc kubenswrapper[4838]: E0202 10:53:50.757787 4838 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"proxy-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"proxy-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.757972 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.789385 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:50Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.805604 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-host-var-lib-cni-multus\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.805660 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4j6f\" (UniqueName: \"kubernetes.io/projected/464d6539-c3a0-4529-b9a7-45211255c1dc-kube-api-access-c4j6f\") pod \"multus-additional-cni-plugins-xrkv9\" (UID: \"464d6539-c3a0-4529-b9a7-45211255c1dc\") " pod="openshift-multus/multus-additional-cni-plugins-xrkv9" Feb 02 
10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.805685 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-host-run-netns\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.805706 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-multus-cni-dir\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.805723 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-host-var-lib-kubelet\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.805740 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-system-cni-dir\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.805757 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c1e0f2bd-7afa-44f4-a3cb-cad88c063dce-proxy-tls\") pod \"machine-config-daemon-n7ctv\" (UID: \"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\") " pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.805830 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/89c31cbf-ff09-4ee1-91eb-0ce82d805dd7-hosts-file\") pod \"node-resolver-mz9jt\" (UID: \"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\") " pod="openshift-dns/node-resolver-mz9jt" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.805876 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-multus-socket-dir-parent\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.805935 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c1e0f2bd-7afa-44f4-a3cb-cad88c063dce-mcd-auth-proxy-config\") pod \"machine-config-daemon-n7ctv\" (UID: \"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\") " pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.805971 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/464d6539-c3a0-4529-b9a7-45211255c1dc-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-xrkv9\" (UID: 
\"464d6539-c3a0-4529-b9a7-45211255c1dc\") " pod="openshift-multus/multus-additional-cni-plugins-xrkv9" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.806002 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfsfr\" (UniqueName: \"kubernetes.io/projected/89c31cbf-ff09-4ee1-91eb-0ce82d805dd7-kube-api-access-vfsfr\") pod \"node-resolver-mz9jt\" (UID: \"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\") " pod="openshift-dns/node-resolver-mz9jt" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.806027 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/ddc2e893-5801-4e73-a5f6-9cc52f733f49-cni-binary-copy\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.806054 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-multus-conf-dir\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.806138 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-cnibin\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.806165 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/464d6539-c3a0-4529-b9a7-45211255c1dc-cnibin\") pod \"multus-additional-cni-plugins-xrkv9\" (UID: \"464d6539-c3a0-4529-b9a7-45211255c1dc\") " pod="openshift-multus/multus-additional-cni-plugins-xrkv9" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.806185 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/464d6539-c3a0-4529-b9a7-45211255c1dc-cni-binary-copy\") pod \"multus-additional-cni-plugins-xrkv9\" (UID: \"464d6539-c3a0-4529-b9a7-45211255c1dc\") " pod="openshift-multus/multus-additional-cni-plugins-xrkv9" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.806237 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-host-var-lib-cni-bin\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.806268 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-etc-kubernetes\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.806289 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/464d6539-c3a0-4529-b9a7-45211255c1dc-os-release\") pod 
\"multus-additional-cni-plugins-xrkv9\" (UID: \"464d6539-c3a0-4529-b9a7-45211255c1dc\") " pod="openshift-multus/multus-additional-cni-plugins-xrkv9" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.806311 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-hostroot\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.806328 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/464d6539-c3a0-4529-b9a7-45211255c1dc-system-cni-dir\") pod \"multus-additional-cni-plugins-xrkv9\" (UID: \"464d6539-c3a0-4529-b9a7-45211255c1dc\") " pod="openshift-multus/multus-additional-cni-plugins-xrkv9" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.806367 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tvlz\" (UniqueName: \"kubernetes.io/projected/ddc2e893-5801-4e73-a5f6-9cc52f733f49-kube-api-access-5tvlz\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.806468 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jmt8\" (UniqueName: \"kubernetes.io/projected/c1e0f2bd-7afa-44f4-a3cb-cad88c063dce-kube-api-access-4jmt8\") pod \"machine-config-daemon-n7ctv\" (UID: \"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\") " pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.806569 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/464d6539-c3a0-4529-b9a7-45211255c1dc-tuning-conf-dir\") pod \"multus-additional-cni-plugins-xrkv9\" (UID: \"464d6539-c3a0-4529-b9a7-45211255c1dc\") " pod="openshift-multus/multus-additional-cni-plugins-xrkv9" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.806639 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/ddc2e893-5801-4e73-a5f6-9cc52f733f49-multus-daemon-config\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.806688 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-os-release\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.806726 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-host-run-k8s-cni-cncf-io\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.806748 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/c1e0f2bd-7afa-44f4-a3cb-cad88c063dce-rootfs\") pod \"machine-config-daemon-n7ctv\" (UID: \"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\") " pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.806767 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-host-run-multus-certs\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.812330 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:50Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.826998 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:50Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.839930 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:50Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.852599 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:50Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.865048 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:50Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.881179 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:50Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.897258 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:50Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.907735 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/464d6539-c3a0-4529-b9a7-45211255c1dc-system-cni-dir\") pod \"multus-additional-cni-plugins-xrkv9\" (UID: \"464d6539-c3a0-4529-b9a7-45211255c1dc\") " pod="openshift-multus/multus-additional-cni-plugins-xrkv9" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.907777 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tvlz\" (UniqueName: \"kubernetes.io/projected/ddc2e893-5801-4e73-a5f6-9cc52f733f49-kube-api-access-5tvlz\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.907797 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jmt8\" (UniqueName: \"kubernetes.io/projected/c1e0f2bd-7afa-44f4-a3cb-cad88c063dce-kube-api-access-4jmt8\") pod \"machine-config-daemon-n7ctv\" (UID: \"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\") " pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.907813 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/464d6539-c3a0-4529-b9a7-45211255c1dc-tuning-conf-dir\") pod \"multus-additional-cni-plugins-xrkv9\" (UID: \"464d6539-c3a0-4529-b9a7-45211255c1dc\") " pod="openshift-multus/multus-additional-cni-plugins-xrkv9" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.907839 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/ddc2e893-5801-4e73-a5f6-9cc52f733f49-multus-daemon-config\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.907857 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-os-release\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.907873 4838 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-host-run-k8s-cni-cncf-io\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.907888 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/c1e0f2bd-7afa-44f4-a3cb-cad88c063dce-rootfs\") pod \"machine-config-daemon-n7ctv\" (UID: \"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\") " pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.907902 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-host-run-multus-certs\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.907922 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-host-var-lib-cni-multus\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.907938 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4j6f\" (UniqueName: \"kubernetes.io/projected/464d6539-c3a0-4529-b9a7-45211255c1dc-kube-api-access-c4j6f\") pod \"multus-additional-cni-plugins-xrkv9\" (UID: \"464d6539-c3a0-4529-b9a7-45211255c1dc\") " pod="openshift-multus/multus-additional-cni-plugins-xrkv9" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.907954 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-host-run-netns\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.907973 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-multus-cni-dir\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.907987 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-host-var-lib-kubelet\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.908003 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-system-cni-dir\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.908019 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c1e0f2bd-7afa-44f4-a3cb-cad88c063dce-proxy-tls\") pod 
\"machine-config-daemon-n7ctv\" (UID: \"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\") " pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.908033 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/89c31cbf-ff09-4ee1-91eb-0ce82d805dd7-hosts-file\") pod \"node-resolver-mz9jt\" (UID: \"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\") " pod="openshift-dns/node-resolver-mz9jt" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.908047 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-multus-socket-dir-parent\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.908060 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c1e0f2bd-7afa-44f4-a3cb-cad88c063dce-mcd-auth-proxy-config\") pod \"machine-config-daemon-n7ctv\" (UID: \"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\") " pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.908074 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/464d6539-c3a0-4529-b9a7-45211255c1dc-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-xrkv9\" (UID: \"464d6539-c3a0-4529-b9a7-45211255c1dc\") " pod="openshift-multus/multus-additional-cni-plugins-xrkv9" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.908092 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfsfr\" (UniqueName: \"kubernetes.io/projected/89c31cbf-ff09-4ee1-91eb-0ce82d805dd7-kube-api-access-vfsfr\") pod \"node-resolver-mz9jt\" (UID: \"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\") " pod="openshift-dns/node-resolver-mz9jt" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.908108 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/ddc2e893-5801-4e73-a5f6-9cc52f733f49-cni-binary-copy\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.908124 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-multus-conf-dir\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.908144 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-cnibin\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.908168 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/464d6539-c3a0-4529-b9a7-45211255c1dc-cnibin\") pod \"multus-additional-cni-plugins-xrkv9\" (UID: \"464d6539-c3a0-4529-b9a7-45211255c1dc\") " 
pod="openshift-multus/multus-additional-cni-plugins-xrkv9" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.908184 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/464d6539-c3a0-4529-b9a7-45211255c1dc-cni-binary-copy\") pod \"multus-additional-cni-plugins-xrkv9\" (UID: \"464d6539-c3a0-4529-b9a7-45211255c1dc\") " pod="openshift-multus/multus-additional-cni-plugins-xrkv9" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.908198 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-host-var-lib-cni-bin\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.908212 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-etc-kubernetes\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.908225 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/464d6539-c3a0-4529-b9a7-45211255c1dc-os-release\") pod \"multus-additional-cni-plugins-xrkv9\" (UID: \"464d6539-c3a0-4529-b9a7-45211255c1dc\") " pod="openshift-multus/multus-additional-cni-plugins-xrkv9" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.908247 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-hostroot\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.908297 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-hostroot\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.908328 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/464d6539-c3a0-4529-b9a7-45211255c1dc-system-cni-dir\") pod \"multus-additional-cni-plugins-xrkv9\" (UID: \"464d6539-c3a0-4529-b9a7-45211255c1dc\") " pod="openshift-multus/multus-additional-cni-plugins-xrkv9" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.908768 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/89c31cbf-ff09-4ee1-91eb-0ce82d805dd7-hosts-file\") pod \"node-resolver-mz9jt\" (UID: \"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\") " pod="openshift-dns/node-resolver-mz9jt" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.908804 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-multus-conf-dir\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.908846 4838 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-host-var-lib-cni-bin\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.908928 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/464d6539-c3a0-4529-b9a7-45211255c1dc-cnibin\") pod \"multus-additional-cni-plugins-xrkv9\" (UID: \"464d6539-c3a0-4529-b9a7-45211255c1dc\") " pod="openshift-multus/multus-additional-cni-plugins-xrkv9" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.909004 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-etc-kubernetes\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.909024 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-cnibin\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.909050 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-host-run-netns\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.909101 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-multus-cni-dir\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.909099 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/c1e0f2bd-7afa-44f4-a3cb-cad88c063dce-rootfs\") pod \"machine-config-daemon-n7ctv\" (UID: \"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\") " pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.909109 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-host-run-multus-certs\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.909136 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-host-run-k8s-cni-cncf-io\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.909106 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/464d6539-c3a0-4529-b9a7-45211255c1dc-os-release\") pod \"multus-additional-cni-plugins-xrkv9\" (UID: \"464d6539-c3a0-4529-b9a7-45211255c1dc\") " 
pod="openshift-multus/multus-additional-cni-plugins-xrkv9" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.909137 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-host-var-lib-cni-multus\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.909160 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-os-release\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.909242 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-host-var-lib-kubelet\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.909265 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/464d6539-c3a0-4529-b9a7-45211255c1dc-tuning-conf-dir\") pod \"multus-additional-cni-plugins-xrkv9\" (UID: \"464d6539-c3a0-4529-b9a7-45211255c1dc\") " pod="openshift-multus/multus-additional-cni-plugins-xrkv9" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.909715 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-multus-socket-dir-parent\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.909738 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/ddc2e893-5801-4e73-a5f6-9cc52f733f49-multus-daemon-config\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.909759 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/464d6539-c3a0-4529-b9a7-45211255c1dc-cni-binary-copy\") pod \"multus-additional-cni-plugins-xrkv9\" (UID: \"464d6539-c3a0-4529-b9a7-45211255c1dc\") " pod="openshift-multus/multus-additional-cni-plugins-xrkv9" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.909821 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/464d6539-c3a0-4529-b9a7-45211255c1dc-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-xrkv9\" (UID: \"464d6539-c3a0-4529-b9a7-45211255c1dc\") " pod="openshift-multus/multus-additional-cni-plugins-xrkv9" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.909821 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/ddc2e893-5801-4e73-a5f6-9cc52f733f49-cni-binary-copy\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.909889 4838 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/ddc2e893-5801-4e73-a5f6-9cc52f733f49-system-cni-dir\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv"
Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.913575 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:50Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.930993 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4j6f\" (UniqueName: \"kubernetes.io/projected/464d6539-c3a0-4529-b9a7-45211255c1dc-kube-api-access-c4j6f\") pod \"multus-additional-cni-plugins-xrkv9\" (UID: \"464d6539-c3a0-4529-b9a7-45211255c1dc\") " pod="openshift-multus/multus-additional-cni-plugins-xrkv9"
Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.933424 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:50Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.936265 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tvlz\" (UniqueName: \"kubernetes.io/projected/ddc2e893-5801-4e73-a5f6-9cc52f733f49-kube-api-access-5tvlz\") pod \"multus-ndxhv\" (UID: \"ddc2e893-5801-4e73-a5f6-9cc52f733f49\") " pod="openshift-multus/multus-ndxhv" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.937271 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfsfr\" (UniqueName: \"kubernetes.io/projected/89c31cbf-ff09-4ee1-91eb-0ce82d805dd7-kube-api-access-vfsfr\") pod \"node-resolver-mz9jt\" (UID: \"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\") " pod="openshift-dns/node-resolver-mz9jt" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.960504 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:50Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:50 crc kubenswrapper[4838]: I0202 10:53:50.981717 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:50Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.001477 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:50Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.017866 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.029853 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.038760 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.057338 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-mz9jt" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.057586 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.079766 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-ndxhv" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.079964 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc 
kubenswrapper[4838]: I0202 10:53:51.093807 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.109304 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.109383 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:53:51 crc kubenswrapper[4838]: E0202 10:53:51.109448 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:53:55.109424421 +0000 UTC m=+29.446525449 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:53:51 crc kubenswrapper[4838]: E0202 10:53:51.109450 4838 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.109494 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:53:51 crc kubenswrapper[4838]: E0202 10:53:51.109501 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 10:53:55.109491913 +0000 UTC m=+29.446592941 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 10:53:51 crc kubenswrapper[4838]: E0202 10:53:51.109589 4838 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 10:53:51 crc kubenswrapper[4838]: E0202 10:53:51.109648 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 10:53:55.109635047 +0000 UTC m=+29.446736075 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.112891 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.131177 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-66l9c"] Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.132054 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.133909 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.134169 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.134357 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.134502 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.134854 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.134955 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.135069 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.158862 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.182603 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.199940 4838 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-02-02 10:48:50 +0000 UTC, rotation deadline is 2026-12-02 09:40:20.735226579 +0000 UTC Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.199975 4838 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7270h46m29.535255232s for next certificate rotation Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.217183 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-kubelet\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.217229 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-etc-openvswitch\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.218644 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-ovn-node-metrics-cert\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.218698 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-run-netns\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.218734 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-node-log\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.218761 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-log-socket\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" Feb 02 
10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.218789 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.218818 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.218839 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-run-ovn\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.218877 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-systemd-units\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.218907 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-run-systemd\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.218930 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-env-overrides\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.218946 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-run-ovn-kubernetes\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.218984 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-cni-bin\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.219003 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7p8h\" (UniqueName: \"kubernetes.io/projected/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-kube-api-access-w7p8h\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.219023 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-run-openvswitch\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.219047 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-var-lib-openvswitch\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.219068 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-slash\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.219088 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-ovnkube-config\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.219109 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-cni-netd\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.219131 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-ovnkube-script-lib\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.219160 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: E0202 10:53:51.219368 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Feb 02 10:53:51 crc kubenswrapper[4838]: E0202 10:53:51.219397 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Feb 02 10:53:51 crc kubenswrapper[4838]: E0202 10:53:51.219408 4838 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 10:53:51 crc kubenswrapper[4838]: E0202 10:53:51.219450 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 10:53:55.21943704 +0000 UTC m=+29.556538068 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 10:53:51 crc kubenswrapper[4838]: E0202 10:53:51.219861 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Feb 02 10:53:51 crc kubenswrapper[4838]: E0202 10:53:51.219880 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Feb 02 10:53:51 crc kubenswrapper[4838]: E0202 10:53:51.219888 4838 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 10:53:51 crc kubenswrapper[4838]: E0202 10:53:51.219920 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 10:53:55.219909392 +0000 UTC m=+29.557010420 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.247836 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.276842 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.309563 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.320310 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.320463 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-run-ovn-kubernetes\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.320544 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-cni-bin\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.320564 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-run-ovn-kubernetes\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.320576 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7p8h\" (UniqueName: \"kubernetes.io/projected/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-kube-api-access-w7p8h\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.320646 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-cni-bin\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.320734 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-var-lib-openvswitch\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.320758 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-run-openvswitch\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.320802 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-var-lib-openvswitch\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.320832 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-slash\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.320895 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-ovnkube-config\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.320922 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-cni-netd\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.320941 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-ovnkube-script-lib\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.320977 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.321023 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-kubelet\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.321039 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-etc-openvswitch\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.321066 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-ovn-node-metrics-cert\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.321087 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-run-netns\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.321104 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-node-log\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.321121 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-log-socket\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.321143 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-slash\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.321173 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-run-ovn\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.321214 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-systemd-units\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.321219 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-node-log\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.321251 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-log-socket\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.321255 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-systemd-units\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.321279 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-run-ovn\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.321313 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-run-netns\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.321340 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-kubelet\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.321357 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.321375 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-etc-openvswitch\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.321396 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-run-systemd\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.321423 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-env-overrides\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.321474 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-run-systemd\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.321810 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-ovnkube-script-lib\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.321888 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-cni-netd\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.321948 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-env-overrides\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.321951 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-ovnkube-config\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.321998 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-run-openvswitch\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.329668 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-ovn-node-metrics-cert\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.336830 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.338240 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7p8h\" (UniqueName: \"kubernetes.io/projected/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-kube-api-access-w7p8h\") pod \"ovnkube-node-66l9c\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.353325 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.371869 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.384816 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.406014 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.420224 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.443354 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.449889 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 21:40:50.335564992 +0000 UTC Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.505413 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:53:51 crc kubenswrapper[4838]: E0202 10:53:51.505527 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.505596 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:53:51 crc kubenswrapper[4838]: E0202 10:53:51.505714 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.667448 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-mz9jt" event={"ID":"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7","Type":"ContainerStarted","Data":"62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209"} Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.667726 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-mz9jt" event={"ID":"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7","Type":"ContainerStarted","Data":"13ca4b54c9eda2e83937b82c11c919ee939886ed1644975c93276fa446025652"} Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.669971 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84"} Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.672230 4838 generic.go:334] "Generic (PLEG): container finished" podID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerID="0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd" exitCode=0 Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.672323 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerDied","Data":"0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd"} Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.672350 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerStarted","Data":"9bb4c34ffb8e28c5369bc34e43e9b0bd2827da1a18ac470deb3887221060994a"} Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.677824 4838 generic.go:334] "Generic (PLEG): container finished" podID="464d6539-c3a0-4529-b9a7-45211255c1dc" containerID="2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971" exitCode=0 Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.677890 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" 
event={"ID":"464d6539-c3a0-4529-b9a7-45211255c1dc","Type":"ContainerDied","Data":"2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971"} Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.677934 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" event={"ID":"464d6539-c3a0-4529-b9a7-45211255c1dc","Type":"ContainerStarted","Data":"0789313cb7b453cea0cd84828d8207992b4c40993b1676bf222e3d8c27f28a4e"} Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.680206 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-ndxhv" event={"ID":"ddc2e893-5801-4e73-a5f6-9cc52f733f49","Type":"ContainerStarted","Data":"479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108"} Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.680250 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-ndxhv" event={"ID":"ddc2e893-5801-4e73-a5f6-9cc52f733f49","Type":"ContainerStarted","Data":"6027c80d19de01a4f4742448f9a9282ca23173e2b65681a0e69ab9aa15dbecb0"} Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.684861 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.701835 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec
8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.732678 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.756760 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: 
I0202 10:53:51.772808 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\
\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.786337 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.790490 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.805675 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.821118 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.838661 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.841462 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.853016 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.862976 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.868705 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.877253 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c1e0f2bd-7afa-44f4-a3cb-cad88c063dce-proxy-tls\") pod \"machine-config-daemon-n7ctv\" (UID: \"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\") " pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.883935 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.899456 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: E0202 10:53:51.910031 4838 configmap.go:193] Couldn't get configMap openshift-machine-config-operator/kube-rbac-proxy: failed to sync configmap cache: timed out waiting for the condition Feb 02 10:53:51 crc kubenswrapper[4838]: E0202 10:53:51.910129 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c1e0f2bd-7afa-44f4-a3cb-cad88c063dce-mcd-auth-proxy-config podName:c1e0f2bd-7afa-44f4-a3cb-cad88c063dce nodeName:}" failed. No retries permitted until 2026-02-02 10:53:52.410106156 +0000 UTC m=+26.747207194 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "mcd-auth-proxy-config" (UniqueName: "kubernetes.io/configmap/c1e0f2bd-7afa-44f4-a3cb-cad88c063dce-mcd-auth-proxy-config") pod "machine-config-daemon-n7ctv" (UID: "c1e0f2bd-7afa-44f4-a3cb-cad88c063dce") : failed to sync configmap cache: timed out waiting for the condition Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.914282 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: E0202 10:53:51.922747 4838 projected.go:288] Couldn't get configMap openshift-machine-config-operator/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Feb 02 10:53:51 crc kubenswrapper[4838]: E0202 10:53:51.922795 4838 projected.go:194] Error preparing data for projected volume kube-api-access-4jmt8 for pod openshift-machine-config-operator/machine-config-daemon-n7ctv: failed to sync configmap cache: timed out waiting for the condition Feb 02 10:53:51 crc kubenswrapper[4838]: E0202 10:53:51.922859 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c1e0f2bd-7afa-44f4-a3cb-cad88c063dce-kube-api-access-4jmt8 podName:c1e0f2bd-7afa-44f4-a3cb-cad88c063dce nodeName:}" failed. No retries permitted until 2026-02-02 10:53:52.422837891 +0000 UTC m=+26.759938929 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-4jmt8" (UniqueName: "kubernetes.io/projected/c1e0f2bd-7afa-44f4-a3cb-cad88c063dce-kube-api-access-4jmt8") pod "machine-config-daemon-n7ctv" (UID: "c1e0f2bd-7afa-44f4-a3cb-cad88c063dce") : failed to sync configmap cache: timed out waiting for the condition Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.931901 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current 
time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.949135 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.968248 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:51Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.969078 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.985587 4838 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 10:53:51 crc kubenswrapper[4838]: I0202 10:53:51.994914 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.001454 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.007557 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z 
is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.027635 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.042191 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.055118 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.070512 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\
\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"po
dIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.087065 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/e
tc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.103015 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.123951 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z 
is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.134697 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.139412 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-oper
ator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.162983 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.175355 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.198928 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\
\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"po
dIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.207429 4838 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.209040 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.209143 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.209206 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.209394 4838 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.215778 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.217009 4838 kubelet_node_status.go:115] "Node was previously registered" node="crc" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.217292 4838 kubelet_node_status.go:79] "Successfully registered node" node="crc" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.218219 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.218251 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.218261 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.218276 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.218288 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:52Z","lastTransitionTime":"2026-02-02T10:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration 
file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.237200 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.252120 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.265007 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:52 crc kubenswrapper[4838]: E0202 10:53:52.266376 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.270389 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.270428 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.270455 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.270473 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.270485 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:52Z","lastTransitionTime":"2026-02-02T10:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.280851 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:52 crc kubenswrapper[4838]: E0202 10:53:52.282555 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.289314 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.289392 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.289409 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.289432 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.289450 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:52Z","lastTransitionTime":"2026-02-02T10:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.297216 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:52 crc kubenswrapper[4838]: E0202 10:53:52.303237 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.307307 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.307359 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.307376 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.307397 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.307422 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:52Z","lastTransitionTime":"2026-02-02T10:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.311310 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:52 crc kubenswrapper[4838]: E0202 10:53:52.322739 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.327099 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.328316 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.328368 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.328381 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.328402 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.328415 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:52Z","lastTransitionTime":"2026-02-02T10:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:53:52 crc kubenswrapper[4838]: E0202 10:53:52.346307 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: E0202 10:53:52.346454 4838 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.348306 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.348374 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.348390 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.348419 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.348438 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:52Z","lastTransitionTime":"2026-02-02T10:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.438722 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c1e0f2bd-7afa-44f4-a3cb-cad88c063dce-mcd-auth-proxy-config\") pod \"machine-config-daemon-n7ctv\" (UID: \"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\") " pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.438785 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jmt8\" (UniqueName: \"kubernetes.io/projected/c1e0f2bd-7afa-44f4-a3cb-cad88c063dce-kube-api-access-4jmt8\") pod \"machine-config-daemon-n7ctv\" (UID: \"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\") " pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.439808 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c1e0f2bd-7afa-44f4-a3cb-cad88c063dce-mcd-auth-proxy-config\") pod \"machine-config-daemon-n7ctv\" (UID: \"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\") " pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.442087 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jmt8\" (UniqueName: \"kubernetes.io/projected/c1e0f2bd-7afa-44f4-a3cb-cad88c063dce-kube-api-access-4jmt8\") pod \"machine-config-daemon-n7ctv\" (UID: \"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\") " pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.450207 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.450254 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.450268 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.450285 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.450298 4838 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:52Z","lastTransitionTime":"2026-02-02T10:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.450889 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 14:57:29.140175196 +0000 UTC Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.505940 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:53:52 crc kubenswrapper[4838]: E0202 10:53:52.506052 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.553449 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.553502 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.553521 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.553545 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.553564 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:52Z","lastTransitionTime":"2026-02-02T10:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.598200 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" Feb 02 10:53:52 crc kubenswrapper[4838]: W0202 10:53:52.610883 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc1e0f2bd_7afa_44f4_a3cb_cad88c063dce.slice/crio-9c95a15044ef53bad97eebe3fe05d3903ec0e0a240804c9ee289d243a3e12c25 WatchSource:0}: Error finding container 9c95a15044ef53bad97eebe3fe05d3903ec0e0a240804c9ee289d243a3e12c25: Status 404 returned error can't find the container with id 9c95a15044ef53bad97eebe3fe05d3903ec0e0a240804c9ee289d243a3e12c25 Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.656596 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.656686 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.656704 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.656726 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.656741 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:52Z","lastTransitionTime":"2026-02-02T10:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.684783 4838 generic.go:334] "Generic (PLEG): container finished" podID="464d6539-c3a0-4529-b9a7-45211255c1dc" containerID="d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184" exitCode=0 Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.684832 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" event={"ID":"464d6539-c3a0-4529-b9a7-45211255c1dc","Type":"ContainerDied","Data":"d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184"} Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.693436 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerStarted","Data":"d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d"} Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.693496 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerStarted","Data":"c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2"} Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.693530 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerStarted","Data":"7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682"} Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.693552 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerStarted","Data":"6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572"} Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.693570 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerStarted","Data":"188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f"} Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.693596 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerStarted","Data":"047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d"} Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.697693 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" event={"ID":"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce","Type":"ContainerStarted","Data":"9c95a15044ef53bad97eebe3fe05d3903ec0e0a240804c9ee289d243a3e12c25"} Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.700641 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.713309 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.728423 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":
{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.741297 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.761121 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.761167 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.761178 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.761193 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.761204 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:52Z","lastTransitionTime":"2026-02-02T10:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.766958 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d975
51480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.785073 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.798922 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.830097 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.851678 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.863976 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.864066 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.864080 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.864100 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.864118 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:52Z","lastTransitionTime":"2026-02-02T10:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.868154 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.881975 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.896130 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.908822 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:52Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.966371 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.966408 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.966416 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.966429 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:52 crc kubenswrapper[4838]: I0202 10:53:52.966439 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:52Z","lastTransitionTime":"2026-02-02T10:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.069528 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.069578 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.069590 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.069606 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.069637 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:53Z","lastTransitionTime":"2026-02-02T10:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.172056 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.172119 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.172136 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.172157 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.172173 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:53Z","lastTransitionTime":"2026-02-02T10:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.275113 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.275164 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.275176 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.275191 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.275203 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:53Z","lastTransitionTime":"2026-02-02T10:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.378072 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.378138 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.378155 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.378180 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.378196 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:53Z","lastTransitionTime":"2026-02-02T10:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.389782 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-ftlpr"] Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.390288 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-ftlpr" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.393249 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.393423 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.393746 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.393838 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.412012 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.430180 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.450917 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.451222 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 19:18:41.567259494 +0000 UTC Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.471138 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.481339 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.481412 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.481431 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.481457 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.481478 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:53Z","lastTransitionTime":"2026-02-02T10:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.486512 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.505745 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:53:53 crc kubenswrapper[4838]: E0202 10:53:53.505957 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.506496 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:53:53 crc kubenswrapper[4838]: E0202 10:53:53.506675 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.509547 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.529279 4838 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.548474 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.549864 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3c555924-1f4c-4168-b9da-61f639e8e50d-serviceca\") pod \"node-ca-ftlpr\" (UID: \"3c555924-1f4c-4168-b9da-61f639e8e50d\") " pod="openshift-image-registry/node-ca-ftlpr" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.549950 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r429m\" (UniqueName: \"kubernetes.io/projected/3c555924-1f4c-4168-b9da-61f639e8e50d-kube-api-access-r429m\") pod \"node-ca-ftlpr\" (UID: \"3c555924-1f4c-4168-b9da-61f639e8e50d\") " pod="openshift-image-registry/node-ca-ftlpr" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.550029 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3c555924-1f4c-4168-b9da-61f639e8e50d-host\") pod \"node-ca-ftlpr\" (UID: 
\"3c555924-1f4c-4168-b9da-61f639e8e50d\") " pod="openshift-image-registry/node-ca-ftlpr" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.580956 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready
\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"n
ame\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o:
//0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.583863 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.583929 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.583952 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.583983 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.584019 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:53Z","lastTransitionTime":"2026-02-02T10:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.596099 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.613899 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.630109 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.650924 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3c555924-1f4c-4168-b9da-61f639e8e50d-host\") pod \"node-ca-ftlpr\" (UID: \"3c555924-1f4c-4168-b9da-61f639e8e50d\") " pod="openshift-image-registry/node-ca-ftlpr" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.651057 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3c555924-1f4c-4168-b9da-61f639e8e50d-host\") pod \"node-ca-ftlpr\" (UID: \"3c555924-1f4c-4168-b9da-61f639e8e50d\") " pod="openshift-image-registry/node-ca-ftlpr" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.651070 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3c555924-1f4c-4168-b9da-61f639e8e50d-serviceca\") pod \"node-ca-ftlpr\" (UID: \"3c555924-1f4c-4168-b9da-61f639e8e50d\") " pod="openshift-image-registry/node-ca-ftlpr" Feb 02 10:53:53 crc 
kubenswrapper[4838]: I0202 10:53:53.651150 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r429m\" (UniqueName: \"kubernetes.io/projected/3c555924-1f4c-4168-b9da-61f639e8e50d-kube-api-access-r429m\") pod \"node-ca-ftlpr\" (UID: \"3c555924-1f4c-4168-b9da-61f639e8e50d\") " pod="openshift-image-registry/node-ca-ftlpr" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.652975 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3c555924-1f4c-4168-b9da-61f639e8e50d-serviceca\") pod \"node-ca-ftlpr\" (UID: \"3c555924-1f4c-4168-b9da-61f639e8e50d\") " pod="openshift-image-registry/node-ca-ftlpr" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.655320 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-
02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.671147 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.685070 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r429m\" (UniqueName: \"kubernetes.io/projected/3c555924-1f4c-4168-b9da-61f639e8e50d-kube-api-access-r429m\") pod \"node-ca-ftlpr\" (UID: \"3c555924-1f4c-4168-b9da-61f639e8e50d\") " pod="openshift-image-registry/node-ca-ftlpr" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.687691 4838 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.687735 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.687752 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.687777 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.687794 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:53Z","lastTransitionTime":"2026-02-02T10:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.707692 4838 generic.go:334] "Generic (PLEG): container finished" podID="464d6539-c3a0-4529-b9a7-45211255c1dc" containerID="1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274" exitCode=0 Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.707800 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" event={"ID":"464d6539-c3a0-4529-b9a7-45211255c1dc","Type":"ContainerDied","Data":"1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274"} Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.711265 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" event={"ID":"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce","Type":"ContainerStarted","Data":"ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f"} Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.711323 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" event={"ID":"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce","Type":"ContainerStarted","Data":"f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef"} Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.713951 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-ftlpr" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.730541 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\
"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: W0202 10:53:53.747009 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3c555924_1f4c_4168_b9da_61f639e8e50d.slice/crio-21659b2238978ad7e16513ec0d611673a251bbf8d0465effb3060498d6e7e82f WatchSource:0}: Error finding container 21659b2238978ad7e16513ec0d611673a251bbf8d0465effb3060498d6e7e82f: Status 404 returned error can't find the container with id 21659b2238978ad7e16513ec0d611673a251bbf8d0465effb3060498d6e7e82f Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.753244 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.773550 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.787320 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.791069 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.791127 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.791145 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.791169 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.791187 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:53Z","lastTransitionTime":"2026-02-02T10:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.804550 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.818840 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.834666 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.852247 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.868842 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.881672 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.893784 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.893817 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.893828 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.893843 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.893854 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:53Z","lastTransitionTime":"2026-02-02T10:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.912000 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d975
51480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.923248 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.942248 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir
\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.954699 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.967110 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.989198 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:53Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.995939 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.995991 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.996002 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.996019 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:53 crc kubenswrapper[4838]: I0202 10:53:53.996031 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:53Z","lastTransitionTime":"2026-02-02T10:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.025865 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:54Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.065717 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:54Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.098717 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.098750 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.098761 4838 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.098776 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.098786 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:54Z","lastTransitionTime":"2026-02-02T10:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.104546 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:54Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.148861 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:54Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.184530 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:54Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.200842 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.200871 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.200883 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.200906 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.200918 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:54Z","lastTransitionTime":"2026-02-02T10:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.227122 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:54Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.262735 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:54Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.302935 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.302964 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.302974 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.302990 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.303000 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:54Z","lastTransitionTime":"2026-02-02T10:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.314422 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mount
Path\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readO
nly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:54Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.350906 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:54Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.400070 4838 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:54Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.405149 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.405178 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.405189 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.405202 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.405211 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:54Z","lastTransitionTime":"2026-02-02T10:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.431262 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:54Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.452202 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 06:31:20.233695309 +0000 UTC Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.468967 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:54Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.505528 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:53:54 crc kubenswrapper[4838]: E0202 10:53:54.506696 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.507486 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.507563 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.507633 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.507699 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.507756 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:54Z","lastTransitionTime":"2026-02-02T10:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.610049 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.610327 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.610339 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.610355 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.610365 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:54Z","lastTransitionTime":"2026-02-02T10:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.713496 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.713533 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.713544 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.713572 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.713583 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:54Z","lastTransitionTime":"2026-02-02T10:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.720498 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerStarted","Data":"da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c"} Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.722557 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-ftlpr" event={"ID":"3c555924-1f4c-4168-b9da-61f639e8e50d","Type":"ContainerStarted","Data":"29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97"} Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.722594 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-ftlpr" event={"ID":"3c555924-1f4c-4168-b9da-61f639e8e50d","Type":"ContainerStarted","Data":"21659b2238978ad7e16513ec0d611673a251bbf8d0465effb3060498d6e7e82f"} Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.729952 4838 generic.go:334] "Generic (PLEG): container finished" podID="464d6539-c3a0-4529-b9a7-45211255c1dc" containerID="27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0" exitCode=0 Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.730572 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" event={"ID":"464d6539-c3a0-4529-b9a7-45211255c1dc","Type":"ContainerDied","Data":"27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0"} Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.742867 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:54Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.755533 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:54Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.768093 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:54Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.781095 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:54Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.791357 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:54Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.803479 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:54Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.816054 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.816099 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.816112 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.816127 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.816138 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:54Z","lastTransitionTime":"2026-02-02T10:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.817885 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:54Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.829757 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:54Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.839773 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:54Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.866804 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:54Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.927059 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:54Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.930566 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.930604 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.930634 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.930654 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.930666 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:54Z","lastTransitionTime":"2026-02-02T10:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.961066 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d975
51480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:54Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:54 crc kubenswrapper[4838]: I0202 10:53:54.985953 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:54Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.024696 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:55Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.033296 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.033329 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.033337 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.033351 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.033359 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:55Z","lastTransitionTime":"2026-02-02T10:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.065152 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:55Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.111461 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:55Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.135595 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.135640 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.135651 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.135666 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.135676 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:55Z","lastTransitionTime":"2026-02-02T10:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.152409 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:55Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.170905 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:53:55 crc kubenswrapper[4838]: E0202 10:53:55.171226 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:54:03.171196481 +0000 UTC m=+37.508297549 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.171341 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.171399 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:53:55 crc kubenswrapper[4838]: E0202 10:53:55.171467 4838 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Feb 02 10:53:55 crc kubenswrapper[4838]: E0202 10:53:55.171506 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 10:54:03.171497419 +0000 UTC m=+37.508598447 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Feb 02 10:53:55 crc kubenswrapper[4838]: E0202 10:53:55.171845 4838 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Feb 02 10:53:55 crc kubenswrapper[4838]: E0202 10:53:55.171890 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 10:54:03.171880249 +0000 UTC m=+37.508981277 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.186957 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:55Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.226459 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:55Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.243710 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.243803 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.243823 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.243847 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.243871 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:55Z","lastTransitionTime":"2026-02-02T10:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.272463 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.272540 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:53:55 crc kubenswrapper[4838]: E0202 10:53:55.272807 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Feb 02 10:53:55 crc kubenswrapper[4838]: E0202 10:53:55.273282 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Feb 02 10:53:55 crc kubenswrapper[4838]: E0202 10:53:55.273413 4838 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 10:53:55 crc kubenswrapper[4838]: E0202 10:53:55.273503 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 10:54:03.273475356 +0000 UTC m=+37.610576424 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 10:53:55 crc kubenswrapper[4838]: E0202 10:53:55.272912 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Feb 02 10:53:55 crc kubenswrapper[4838]: E0202 10:53:55.274067 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Feb 02 10:53:55 crc kubenswrapper[4838]: E0202 10:53:55.274091 4838 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 10:53:55 crc kubenswrapper[4838]: E0202 10:53:55.274168 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 10:54:03.274143854 +0000 UTC m=+37.611244912 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.275951 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:55Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.312012 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:55Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.347235 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.347297 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.347313 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.347336 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.347354 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:55Z","lastTransitionTime":"2026-02-02T10:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.367378 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:55Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.394165 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:55Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.430160 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:55Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.449926 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.449969 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.449982 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.450000 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.450013 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:55Z","lastTransitionTime":"2026-02-02T10:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.453291 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 13:53:38.073338824 +0000 UTC
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.469245 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:55Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.504766 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:53:55 crc kubenswrapper[4838]: E0202 10:53:55.504880 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.504943 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:53:55 crc kubenswrapper[4838]: E0202 10:53:55.505096 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.519782 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:55Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.549923 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:55Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.553235 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.553308 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.553327 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.553351 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.553371 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:55Z","lastTransitionTime":"2026-02-02T10:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.593145 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:55Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.656762 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.656878 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.656898 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.656964 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.656985 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:55Z","lastTransitionTime":"2026-02-02T10:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.740035 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" event={"ID":"464d6539-c3a0-4529-b9a7-45211255c1dc","Type":"ContainerStarted","Data":"4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9"} Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.756291 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:55Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.761670 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.761721 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.761735 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.761757 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.761772 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:55Z","lastTransitionTime":"2026-02-02T10:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.778300 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/
run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\"
,\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:55Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.797132 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:55Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.826651 4838 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:55Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.849801 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:55Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.864826 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.864944 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.864965 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.864995 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.865571 4838 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:55Z","lastTransitionTime":"2026-02-02T10:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.866516 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:55Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.881180 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:55Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.909850 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:55Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.952222 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:55Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.968827 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.968898 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.968929 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.968960 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.968983 4838 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:55Z","lastTransitionTime":"2026-02-02T10:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:55 crc kubenswrapper[4838]: I0202 10:53:55.993913 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:55Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.032203 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:56Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.072560 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.072693 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.072716 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.072747 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.072770 4838 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:56Z","lastTransitionTime":"2026-02-02T10:53:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.074594 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:56Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.113050 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:56Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.153438 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:56Z is after 2025-08-24T17:21:41Z"
Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.176411 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.176466 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.176488 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.176517 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.176538 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:56Z","lastTransitionTime":"2026-02-02T10:53:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.252007 4838 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials"
Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.289082 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.289156 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.289178 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.289208 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.289244 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:56Z","lastTransitionTime":"2026-02-02T10:53:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.392390 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.392438 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.392454 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.392477 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.392493 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:56Z","lastTransitionTime":"2026-02-02T10:53:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.454041 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 09:17:42.708054167 +0000 UTC
Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.495679 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.495744 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.495761 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.495786 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.495807 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:56Z","lastTransitionTime":"2026-02-02T10:53:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.505277 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:53:56 crc kubenswrapper[4838]: E0202 10:53:56.505443 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.525726 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\
\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb
35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:56Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.540720 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:56Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.566910 4838 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:56Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.588973 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:56Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.598777 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.598845 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.598864 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.598889 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.598906 4838 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:56Z","lastTransitionTime":"2026-02-02T10:53:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.609339 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:56Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.625416 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:56Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.640795 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:56Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.660232 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:56Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.681825 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:56Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.702369 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.702432 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.702447 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.702465 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.702477 4838 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:56Z","lastTransitionTime":"2026-02-02T10:53:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.703794 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:56Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.720496 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:56Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.737412 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:56Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.746511 4838 generic.go:334] "Generic (PLEG): container finished" podID="464d6539-c3a0-4529-b9a7-45211255c1dc" containerID="4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9" exitCode=0 Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.746584 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" event={"ID":"464d6539-c3a0-4529-b9a7-45211255c1dc","Type":"ContainerDied","Data":"4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9"} Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.753215 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:56Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.766230 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:56Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.779561 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:56Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.790655 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:56Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.805703 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.805745 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.805757 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.805775 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.805787 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:56Z","lastTransitionTime":"2026-02-02T10:53:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.831344 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.i
o/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"
finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:56Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.869404 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:56Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.908434 4838 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.908796 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.908817 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.908841 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.908861 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:56Z","lastTransitionTime":"2026-02-02T10:53:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.923369 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:56Z 
is after 2025-08-24T17:21:41Z" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.956013 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:56Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:56 crc kubenswrapper[4838]: I0202 10:53:56.987492 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:56Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.012082 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.012114 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.012124 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.012141 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.012154 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:57Z","lastTransitionTime":"2026-02-02T10:53:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.031387 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"host
IP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:57Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.070970 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:57Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.114353 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:57Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.114661 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.114699 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.114712 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.114729 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.114741 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:57Z","lastTransitionTime":"2026-02-02T10:53:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.155221 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:57Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.192574 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:57Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.217907 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.217971 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.217989 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.218020 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.218039 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:57Z","lastTransitionTime":"2026-02-02T10:53:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.233406 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:57Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.275576 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:57Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.320217 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.320258 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.320274 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.320293 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.320306 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:57Z","lastTransitionTime":"2026-02-02T10:53:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.424042 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.424106 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.424129 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.424156 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.424177 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:57Z","lastTransitionTime":"2026-02-02T10:53:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.454787 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 12:13:48.179780135 +0000 UTC Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.505449 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.505526 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:53:57 crc kubenswrapper[4838]: E0202 10:53:57.505697 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:53:57 crc kubenswrapper[4838]: E0202 10:53:57.505921 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.526831 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.526893 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.526917 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.526941 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.526958 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:57Z","lastTransitionTime":"2026-02-02T10:53:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.633704 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.633754 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.634150 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.634201 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.634467 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:57Z","lastTransitionTime":"2026-02-02T10:53:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.737540 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.737601 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.737649 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.737681 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.737705 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:57Z","lastTransitionTime":"2026-02-02T10:53:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.756312 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerStarted","Data":"fcab7894fcb8b1055505140881ce36efa0cf71530b210c7a4cdeccad1f6b23f3"} Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.756951 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.757004 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.765886 4838 generic.go:334] "Generic (PLEG): container finished" podID="464d6539-c3a0-4529-b9a7-45211255c1dc" containerID="bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee" exitCode=0 Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.765953 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" event={"ID":"464d6539-c3a0-4529-b9a7-45211255c1dc","Type":"ContainerDied","Data":"bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee"} Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.777540 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:57Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.790668 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.791790 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.797149 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:57Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.816182 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:57Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.829247 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:57Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.834041 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.839677 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.839726 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.839739 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.839756 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.839769 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:57Z","lastTransitionTime":"2026-02-02T10:53:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.839034 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:57Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.855362 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:57Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.868245 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:57Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.880410 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:57Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.889774 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:57Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.902448 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:57Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.911947 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:57Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.927003 4838 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\
\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcab7894fcb8b1055505140881ce36efa0cf71530b210c7a4cdeccad1f6b23f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuberne
tes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:57Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.942897 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:57Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.944531 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.944566 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.944578 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.944595 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.944610 4838 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:57Z","lastTransitionTime":"2026-02-02T10:53:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.955373 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:57Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.970084 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:57Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:57 crc kubenswrapper[4838]: I0202 10:53:57.986578 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:57Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.004922 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:58Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.022830 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:58Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.039794 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:58Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.047261 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.047321 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.047340 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.047364 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.047382 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:58Z","lastTransitionTime":"2026-02-02T10:53:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.072231 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:58Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.113905 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:58Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.150391 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.150451 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:58 crc 
kubenswrapper[4838]: I0202 10:53:58.150464 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.150482 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.150494 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:58Z","lastTransitionTime":"2026-02-02T10:53:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.152063 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\
\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:58Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.201744 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcab7894fcb8b1055505140881ce36efa0cf7153
0b210c7a4cdeccad1f6b23f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:58Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.239231 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:58Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.253515 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.253572 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.253587 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.253610 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.253648 4838 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:58Z","lastTransitionTime":"2026-02-02T10:53:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.269190 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:58Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.307695 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:58Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.350701 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:58Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.357224 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.357288 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.357315 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.357345 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.357367 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:58Z","lastTransitionTime":"2026-02-02T10:53:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.389034 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:58Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.456154 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 16:27:14.887199782 +0000 UTC Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.462286 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.462335 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.462354 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.462378 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.462396 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:58Z","lastTransitionTime":"2026-02-02T10:53:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.505589 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:53:58 crc kubenswrapper[4838]: E0202 10:53:58.505819 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.564925 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.565000 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.565019 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.565047 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.565067 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:58Z","lastTransitionTime":"2026-02-02T10:53:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.668281 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.668336 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.668353 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.668375 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.668393 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:58Z","lastTransitionTime":"2026-02-02T10:53:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.771395 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.771458 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.771476 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.771500 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.771516 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:58Z","lastTransitionTime":"2026-02-02T10:53:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.780523 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" event={"ID":"464d6539-c3a0-4529-b9a7-45211255c1dc","Type":"ContainerStarted","Data":"2b6b7df7a695cbe85f6c056fdaf425c3d965d0dcd612c0a98b05b9386d42294e"} Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.785360 4838 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.803300 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:58Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.825343 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b6b7df7a695cbe85f6c056fdaf425c3d965d0dcd612c0a98b05b9386d42294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:58Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.835784 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:58Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.853856 4838 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcab7894fcb8b1055505140881ce36efa0cf71530b210c7a4cdeccad1f6b23f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:58Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.870782 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:58Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.874074 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.874128 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.874160 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.874174 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.874203 4838 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:58Z","lastTransitionTime":"2026-02-02T10:53:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.881980 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:58Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.893146 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:58Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.904524 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:58Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.919357 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:58Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.936594 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:58Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.950354 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:58Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.966216 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:58Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.976841 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.976903 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.976923 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.976946 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.976964 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:58Z","lastTransitionTime":"2026-02-02T10:53:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:53:58 crc kubenswrapper[4838]: I0202 10:53:58.987570 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:58Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.005058 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:53:59Z is after 2025-08-24T17:21:41Z" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.079601 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.079711 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.079737 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.079772 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.079797 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:59Z","lastTransitionTime":"2026-02-02T10:53:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.182826 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.182897 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.182916 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.182937 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.182954 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:59Z","lastTransitionTime":"2026-02-02T10:53:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.285428 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.285467 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.285478 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.285494 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.285505 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:59Z","lastTransitionTime":"2026-02-02T10:53:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.388318 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.388390 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.388408 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.388434 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.388450 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:59Z","lastTransitionTime":"2026-02-02T10:53:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.456609 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 07:09:58.236260324 +0000 UTC Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.491245 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.491324 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.491343 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.491368 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.491392 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:59Z","lastTransitionTime":"2026-02-02T10:53:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.505569 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.505584 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:53:59 crc kubenswrapper[4838]: E0202 10:53:59.505779 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:53:59 crc kubenswrapper[4838]: E0202 10:53:59.505907 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.594869 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.594926 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.594944 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.594967 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.594984 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:59Z","lastTransitionTime":"2026-02-02T10:53:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.698542 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.698592 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.698608 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.698658 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.698676 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:59Z","lastTransitionTime":"2026-02-02T10:53:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.784454 4838 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.801016 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.801077 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.801095 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.801120 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.801137 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:59Z","lastTransitionTime":"2026-02-02T10:53:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.904248 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.904287 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.904299 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.904314 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:53:59 crc kubenswrapper[4838]: I0202 10:53:59.904324 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:53:59Z","lastTransitionTime":"2026-02-02T10:53:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.007382 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.007441 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.007458 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.007482 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.007499 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:00Z","lastTransitionTime":"2026-02-02T10:54:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.110176 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.110238 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.110255 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.110279 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.110296 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:00Z","lastTransitionTime":"2026-02-02T10:54:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.213190 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.213247 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.213265 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.213304 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.213323 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:00Z","lastTransitionTime":"2026-02-02T10:54:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.316915 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.317283 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.317608 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.317841 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.318006 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:00Z","lastTransitionTime":"2026-02-02T10:54:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.421519 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.421611 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.421652 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.421679 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.421698 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:00Z","lastTransitionTime":"2026-02-02T10:54:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.457322 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 23:18:43.933312052 +0000 UTC Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.505064 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:54:00 crc kubenswrapper[4838]: E0202 10:54:00.505248 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.525011 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.525055 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.525069 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.525085 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.525099 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:00Z","lastTransitionTime":"2026-02-02T10:54:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.628009 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.628074 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.628092 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.628124 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.628143 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:00Z","lastTransitionTime":"2026-02-02T10:54:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.731224 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.731277 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.731294 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.731317 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.731334 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:00Z","lastTransitionTime":"2026-02-02T10:54:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.835082 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.835148 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.835167 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.835197 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.835214 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:00Z","lastTransitionTime":"2026-02-02T10:54:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.938374 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.938444 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.938463 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.938521 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:00 crc kubenswrapper[4838]: I0202 10:54:00.938540 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:00Z","lastTransitionTime":"2026-02-02T10:54:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.041825 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.041878 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.041897 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.041921 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.041939 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:01Z","lastTransitionTime":"2026-02-02T10:54:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.145039 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.145106 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.145123 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.145148 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.145165 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:01Z","lastTransitionTime":"2026-02-02T10:54:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.250719 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.251063 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.251205 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.251345 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.251516 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:01Z","lastTransitionTime":"2026-02-02T10:54:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.354322 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.354425 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.354448 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.354476 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.354501 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:01Z","lastTransitionTime":"2026-02-02T10:54:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.456851 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.457367 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.457535 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.458515 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.457469 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 16:57:11.370406334 +0000 UTC Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.458740 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:01Z","lastTransitionTime":"2026-02-02T10:54:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.505350 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.505527 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:54:01 crc kubenswrapper[4838]: E0202 10:54:01.505537 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:54:01 crc kubenswrapper[4838]: E0202 10:54:01.505906 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.562401 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.562468 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.562486 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.562513 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.562530 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:01Z","lastTransitionTime":"2026-02-02T10:54:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.665418 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.665462 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.665478 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.665505 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.665522 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:01Z","lastTransitionTime":"2026-02-02T10:54:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.768408 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.768466 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.768484 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.768510 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.768536 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:01Z","lastTransitionTime":"2026-02-02T10:54:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.794380 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-66l9c_9bc00b9c-6e31-4f8e-b4ba-44150281ed69/ovnkube-controller/0.log" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.798863 4838 generic.go:334] "Generic (PLEG): container finished" podID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerID="fcab7894fcb8b1055505140881ce36efa0cf71530b210c7a4cdeccad1f6b23f3" exitCode=1 Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.798921 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerDied","Data":"fcab7894fcb8b1055505140881ce36efa0cf71530b210c7a4cdeccad1f6b23f3"} Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.800109 4838 scope.go:117] "RemoveContainer" containerID="fcab7894fcb8b1055505140881ce36efa0cf71530b210c7a4cdeccad1f6b23f3" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.821570 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:01Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.843794 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba
8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:01Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.867082 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:01Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.871895 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.871943 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.871959 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.871984 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.872001 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:01Z","lastTransitionTime":"2026-02-02T10:54:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.889392 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:01Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.909690 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:01Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.930192 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:01Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.948524 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:01Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.962801 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:01Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.976269 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.976342 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.976363 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.976424 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.976443 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:01Z","lastTransitionTime":"2026-02-02T10:54:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.977333 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:01Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:01 crc kubenswrapper[4838]: I0202 10:54:01.997351 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b6b7df7a695cbe85f6c056fdaf425c3d965d0dcd612c0a98b05b9386d42294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:01Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.014541 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:02Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.042900 4838 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcab7894fcb8b1055505140881ce36efa0cf71530b210c7a4cdeccad1f6b23f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcab7894fcb8b1055505140881ce36efa0cf71530b210c7a4cdeccad1f6b23f3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:01Z\\\",\\\"message\\\":\\\"pping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.267948 6129 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0202 10:54:01.268032 6129 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.268173 6129 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.268487 6129 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 10:54:01.268560 6129 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.268822 6129 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 10:54:01.268898 6129 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 10:54:01.268998 6129 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 10:54:01.269079 6129 factory.go:656] Stopping watch factory\\\\nI0202 10:54:01.269147 6129 ovnkube.go:599] Stopped ovnkube\\\\nI0202 
10\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:02Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.055556 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:02Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.073553 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"re
adOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:02Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.078515 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.078599 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.078658 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.078695 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.078721 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:02Z","lastTransitionTime":"2026-02-02T10:54:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.182063 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.182123 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.182142 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.182166 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.182183 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:02Z","lastTransitionTime":"2026-02-02T10:54:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.285773 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.285835 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.285852 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.285876 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.285896 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:02Z","lastTransitionTime":"2026-02-02T10:54:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.389430 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.389495 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.389512 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.389535 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.389553 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:02Z","lastTransitionTime":"2026-02-02T10:54:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.459723 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 18:52:04.905389616 +0000 UTC Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.493355 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.493453 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.493477 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.493507 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.493530 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:02Z","lastTransitionTime":"2026-02-02T10:54:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.505355 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:54:02 crc kubenswrapper[4838]: E0202 10:54:02.505585 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.596439 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.596513 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.596535 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.596567 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.596589 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:02Z","lastTransitionTime":"2026-02-02T10:54:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.699459 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.699511 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.699525 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.699543 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.699555 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:02Z","lastTransitionTime":"2026-02-02T10:54:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.713030 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.713100 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.713118 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.713142 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.713161 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:02Z","lastTransitionTime":"2026-02-02T10:54:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:02 crc kubenswrapper[4838]: E0202 10:54:02.734721 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:02Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.739487 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.739549 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.739562 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.739581 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.739592 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:02Z","lastTransitionTime":"2026-02-02T10:54:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:02 crc kubenswrapper[4838]: E0202 10:54:02.758468 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:02Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.762833 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.762891 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.762908 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.762931 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.762949 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:02Z","lastTransitionTime":"2026-02-02T10:54:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:02 crc kubenswrapper[4838]: E0202 10:54:02.783584 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:02Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.788408 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.788460 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.788503 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.788523 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.788539 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:02Z","lastTransitionTime":"2026-02-02T10:54:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.804328 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-66l9c_9bc00b9c-6e31-4f8e-b4ba-44150281ed69/ovnkube-controller/0.log" Feb 02 10:54:02 crc kubenswrapper[4838]: E0202 10:54:02.808564 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[ ... image list elided; identical to the 10:54:02.783584 attempt above ... ],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:02Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.812875 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.812940 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.812957 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.812980 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.812997 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:02Z","lastTransitionTime":"2026-02-02T10:54:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.822436 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerStarted","Data":"10e5588991d7a9353b9a8e675fca93bfac2b62e459b8e14a027eecc99e18f433"} Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.822725 4838 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 10:54:02 crc kubenswrapper[4838]: E0202 10:54:02.836520 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[ ... image list elided; identical to the 10:54:02.783584 attempt above ... ],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:02Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:02 crc kubenswrapper[4838]: E0202 10:54:02.836806 4838 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.838634 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.838685 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.838699 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.838720 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.838734 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:02Z","lastTransitionTime":"2026-02-02T10:54:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.844064 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:02Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.865723 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:02Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.890349 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:02Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.902066 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:02Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.918577 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:02Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.931327 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:02Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.941277 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.941328 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.941340 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.941362 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.941376 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:02Z","lastTransitionTime":"2026-02-02T10:54:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.948638 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:02Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.958447 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:02Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.976566 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b6b7df7a695cbe85f6c056fdaf425c3d965d0dcd612c0a98b05b9386d42294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:02Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:02 crc kubenswrapper[4838]: I0202 10:54:02.993410 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:02Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.017246 4838 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10e5588991d7a9353b9a8e675fca93bfac2b62e459b8e14a027eecc99e18f433\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcab7894fcb8b1055505140881ce36efa0cf71530b210c7a4cdeccad1f6b23f3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:01Z\\\",\\\"message\\\":\\\"pping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.267948 6129 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0202 10:54:01.268032 6129 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.268173 6129 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.268487 6129 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 10:54:01.268560 6129 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.268822 6129 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 10:54:01.268898 6129 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 10:54:01.268998 6129 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 10:54:01.269079 6129 factory.go:656] Stopping watch factory\\\\nI0202 10:54:01.269147 6129 ovnkube.go:599] Stopped ovnkube\\\\nI0202 
10\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:03Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.036259 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:03Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.044021 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.044085 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.044109 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.044133 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.044151 4838 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:03Z","lastTransitionTime":"2026-02-02T10:54:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.050206 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:03Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.069990 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:03Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.147970 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.148050 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.148076 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.148107 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.148126 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:03Z","lastTransitionTime":"2026-02-02T10:54:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.251144 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.251224 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.251247 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.251277 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.251303 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:03Z","lastTransitionTime":"2026-02-02T10:54:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.270099 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:54:03 crc kubenswrapper[4838]: E0202 10:54:03.270289 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:54:19.270251436 +0000 UTC m=+53.607352494 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.270387 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.270520 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:54:03 crc kubenswrapper[4838]: E0202 10:54:03.270551 4838 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 10:54:03 crc kubenswrapper[4838]: E0202 10:54:03.270698 4838 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 10:54:03 crc kubenswrapper[4838]: E0202 10:54:03.270704 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 10:54:19.270667247 +0000 UTC m=+53.607768345 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 10:54:03 crc kubenswrapper[4838]: E0202 10:54:03.270830 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 10:54:19.270805851 +0000 UTC m=+53.607906899 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.354885 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.355396 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.355455 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.355489 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.355513 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:03Z","lastTransitionTime":"2026-02-02T10:54:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.371778 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.371851 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:54:03 crc kubenswrapper[4838]: E0202 10:54:03.372026 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 10:54:03 crc kubenswrapper[4838]: E0202 10:54:03.372053 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 10:54:03 crc kubenswrapper[4838]: E0202 10:54:03.372072 4838 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 10:54:03 crc kubenswrapper[4838]: E0202 10:54:03.372141 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. 
No retries permitted until 2026-02-02 10:54:19.37211979 +0000 UTC m=+53.709220858 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 10:54:03 crc kubenswrapper[4838]: E0202 10:54:03.372066 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 10:54:03 crc kubenswrapper[4838]: E0202 10:54:03.372216 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 10:54:03 crc kubenswrapper[4838]: E0202 10:54:03.372234 4838 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 10:54:03 crc kubenswrapper[4838]: E0202 10:54:03.372317 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 10:54:19.372293914 +0000 UTC m=+53.709395042 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.458774 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.458842 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.458860 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.458883 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.458900 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:03Z","lastTransitionTime":"2026-02-02T10:54:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.459862 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 06:44:50.100023246 +0000 UTC Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.505592 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.505672 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:54:03 crc kubenswrapper[4838]: E0202 10:54:03.505744 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:54:03 crc kubenswrapper[4838]: E0202 10:54:03.505836 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.562273 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.562329 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.562346 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.562368 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.562385 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:03Z","lastTransitionTime":"2026-02-02T10:54:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.665675 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.665739 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.665757 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.665785 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.665808 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:03Z","lastTransitionTime":"2026-02-02T10:54:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.768640 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.768693 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.768709 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.768731 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.768747 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:03Z","lastTransitionTime":"2026-02-02T10:54:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.817169 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf"] Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.817922 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.823293 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.823855 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.844384 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserv
er-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:03Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.864249 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:03Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.872043 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.872134 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.872154 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.872181 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.872201 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:03Z","lastTransitionTime":"2026-02-02T10:54:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.882162 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:03Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.908110 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b6b7df7a695cbe85f6c056fdaf425c3d965d0dcd612c0a98b05b9386d42294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:03Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.930019 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:03Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.961829 4838 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10e5588991d7a9353b9a8e675fca93bfac2b62e459b8e14a027eecc99e18f433\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcab7894fcb8b1055505140881ce36efa0cf71530b210c7a4cdeccad1f6b23f3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:01Z\\\",\\\"message\\\":\\\"pping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.267948 6129 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0202 10:54:01.268032 6129 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.268173 6129 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.268487 6129 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 10:54:01.268560 6129 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.268822 6129 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 10:54:01.268898 6129 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 10:54:01.268998 6129 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 10:54:01.269079 6129 factory.go:656] Stopping watch factory\\\\nI0202 10:54:01.269147 6129 ovnkube.go:599] Stopped ovnkube\\\\nI0202 
10\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:03Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.974649 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.974704 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.974722 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.974746 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.974762 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:03Z","lastTransitionTime":"2026-02-02T10:54:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.978038 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/dfa832ec-d3ca-4c0d-bef6-863867a95110-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-4rcnf\" (UID: \"dfa832ec-d3ca-4c0d-bef6-863867a95110\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.978692 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mz6f\" (UniqueName: \"kubernetes.io/projected/dfa832ec-d3ca-4c0d-bef6-863867a95110-kube-api-access-8mz6f\") pod \"ovnkube-control-plane-749d76644c-4rcnf\" (UID: \"dfa832ec-d3ca-4c0d-bef6-863867a95110\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.978963 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/dfa832ec-d3ca-4c0d-bef6-863867a95110-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-4rcnf\" (UID: \"dfa832ec-d3ca-4c0d-bef6-863867a95110\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.979042 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/dfa832ec-d3ca-4c0d-bef6-863867a95110-env-overrides\") pod \"ovnkube-control-plane-749d76644c-4rcnf\" (UID: \"dfa832ec-d3ca-4c0d-bef6-863867a95110\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" Feb 02 10:54:03 crc kubenswrapper[4838]: I0202 10:54:03.982046 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:03Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.001536 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:03Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.019353 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfa832ec-d3ca-4c0d-bef6-863867a95110\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4rcnf\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:04Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.034872 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:04Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.054801 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:04Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.076964 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:04Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.077958 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.078017 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.078039 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.078067 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.078088 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:04Z","lastTransitionTime":"2026-02-02T10:54:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.080944 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/dfa832ec-d3ca-4c0d-bef6-863867a95110-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-4rcnf\" (UID: \"dfa832ec-d3ca-4c0d-bef6-863867a95110\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.081011 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mz6f\" (UniqueName: \"kubernetes.io/projected/dfa832ec-d3ca-4c0d-bef6-863867a95110-kube-api-access-8mz6f\") pod \"ovnkube-control-plane-749d76644c-4rcnf\" (UID: \"dfa832ec-d3ca-4c0d-bef6-863867a95110\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.081069 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/dfa832ec-d3ca-4c0d-bef6-863867a95110-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-4rcnf\" (UID: \"dfa832ec-d3ca-4c0d-bef6-863867a95110\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.081107 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/dfa832ec-d3ca-4c0d-bef6-863867a95110-env-overrides\") pod \"ovnkube-control-plane-749d76644c-4rcnf\" (UID: \"dfa832ec-d3ca-4c0d-bef6-863867a95110\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.082229 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/dfa832ec-d3ca-4c0d-bef6-863867a95110-env-overrides\") pod \"ovnkube-control-plane-749d76644c-4rcnf\" (UID: \"dfa832ec-d3ca-4c0d-bef6-863867a95110\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.082306 4838 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/dfa832ec-d3ca-4c0d-bef6-863867a95110-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-4rcnf\" (UID: \"dfa832ec-d3ca-4c0d-bef6-863867a95110\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.095590 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/dfa832ec-d3ca-4c0d-bef6-863867a95110-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-4rcnf\" (UID: \"dfa832ec-d3ca-4c0d-bef6-863867a95110\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.098144 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:04Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.116160 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mz6f\" (UniqueName: \"kubernetes.io/projected/dfa832ec-d3ca-4c0d-bef6-863867a95110-kube-api-access-8mz6f\") pod \"ovnkube-control-plane-749d76644c-4rcnf\" (UID: \"dfa832ec-d3ca-4c0d-bef6-863867a95110\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.127182 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:04Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.142095 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.151750 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:04Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:04 crc kubenswrapper[4838]: W0202 10:54:04.163040 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddfa832ec_d3ca_4c0d_bef6_863867a95110.slice/crio-4c06d5f001556394f948da7c6ef2dad48e199effbc833bc98e4488165012ebc4 WatchSource:0}: Error finding container 4c06d5f001556394f948da7c6ef2dad48e199effbc833bc98e4488165012ebc4: Status 404 returned error can't find the container with id 4c06d5f001556394f948da7c6ef2dad48e199effbc833bc98e4488165012ebc4 Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.181100 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.181178 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.181202 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.181238 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.181263 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:04Z","lastTransitionTime":"2026-02-02T10:54:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.283827 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.283892 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.283909 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.283935 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.283953 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:04Z","lastTransitionTime":"2026-02-02T10:54:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.386808 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.386863 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.386882 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.386904 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.386921 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:04Z","lastTransitionTime":"2026-02-02T10:54:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.460652 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 11:59:44.630224455 +0000 UTC Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.489453 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.489506 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.489524 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.489549 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.489567 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:04Z","lastTransitionTime":"2026-02-02T10:54:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.505085 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:54:04 crc kubenswrapper[4838]: E0202 10:54:04.505298 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.595097 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.595156 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.595236 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.595264 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.595282 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:04Z","lastTransitionTime":"2026-02-02T10:54:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.698883 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.698974 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.698998 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.699029 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.699053 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:04Z","lastTransitionTime":"2026-02-02T10:54:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.802134 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.802197 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.802213 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.802237 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.802254 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:04Z","lastTransitionTime":"2026-02-02T10:54:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.837524 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" event={"ID":"dfa832ec-d3ca-4c0d-bef6-863867a95110","Type":"ContainerStarted","Data":"4c06d5f001556394f948da7c6ef2dad48e199effbc833bc98e4488165012ebc4"} Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.840577 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-66l9c_9bc00b9c-6e31-4f8e-b4ba-44150281ed69/ovnkube-controller/1.log" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.841790 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-66l9c_9bc00b9c-6e31-4f8e-b4ba-44150281ed69/ovnkube-controller/0.log" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.848196 4838 generic.go:334] "Generic (PLEG): container finished" podID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerID="10e5588991d7a9353b9a8e675fca93bfac2b62e459b8e14a027eecc99e18f433" exitCode=1 Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.848269 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerDied","Data":"10e5588991d7a9353b9a8e675fca93bfac2b62e459b8e14a027eecc99e18f433"} Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.848345 4838 scope.go:117] "RemoveContainer" containerID="fcab7894fcb8b1055505140881ce36efa0cf71530b210c7a4cdeccad1f6b23f3" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.849519 4838 scope.go:117] "RemoveContainer" containerID="10e5588991d7a9353b9a8e675fca93bfac2b62e459b8e14a027eecc99e18f433" Feb 02 10:54:04 crc kubenswrapper[4838]: E0202 10:54:04.849841 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-66l9c_openshift-ovn-kubernetes(9bc00b9c-6e31-4f8e-b4ba-44150281ed69)\"" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.869766 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:04Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.887344 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:04Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.905015 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:04Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.905898 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.905959 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.905976 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.906001 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.906018 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:04Z","lastTransitionTime":"2026-02-02T10:54:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.922004 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:04Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.980721 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:04Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:04 crc kubenswrapper[4838]: I0202 10:54:04.995934 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:04Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.005480 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.008536 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.008564 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.008575 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.008591 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.008603 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:05Z","lastTransitionTime":"2026-02-02T10:54:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.022699 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b6b7df7a695cbe85f6c056fdaf425c3d965d0dcd612c0a98b05b9386d42294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379ec
a9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.035564 4838 
status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.064051 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10e5588991d7a9353b9a8e675fca93bfac2b62e459b8e14a027eecc99e18f433\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcab7894fcb8b1055505140881ce36efa0cf71530b210c7a4cdeccad1f6b23f3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:01Z\\\",\\\"message\\\":\\\"pping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.267948 6129 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0202 10:54:01.268032 6129 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.268173 6129 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.268487 6129 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 10:54:01.268560 6129 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.268822 6129 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 10:54:01.268898 6129 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 10:54:01.268998 6129 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 10:54:01.269079 6129 factory.go:656] Stopping watch factory\\\\nI0202 10:54:01.269147 6129 ovnkube.go:599] Stopped ovnkube\\\\nI0202 
10\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10e5588991d7a9353b9a8e675fca93bfac2b62e459b8e14a027eecc99e18f433\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:04Z\\\",\\\"message\\\":\\\".go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 10:54:02.869873 6304 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 10:54:02.869897 6304 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 10:54:02.869907 6304 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 10:54:02.869944 6304 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 10:54:02.869967 6304 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 10:54:02.870068 6304 factory.go:656] Stopping watch factory\\\\nI0202 10:54:02.870092 6304 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 10:54:02.870110 6304 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 10:54:02.870125 6304 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 10:54:02.870140 6304 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 10:54:02.870154 6304 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 10:54:02.870169 6304 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 10:54:02.873737 6304 ovnkube.go:599] Stopped ovnkube\\\\nI0202 10:54:02.873807 6304 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 10:54:02.873919 6304 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:54:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.087885 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster
-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.103976 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.111764 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.111827 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.111845 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.111875 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.111894 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:05Z","lastTransitionTime":"2026-02-02T10:54:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.120279 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfa832ec-d3ca-4c0d-bef6-863867a95110\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4rcnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.141561 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.162841 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.215391 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.215459 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.215476 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.215500 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.215518 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:05Z","lastTransitionTime":"2026-02-02T10:54:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.318513 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.318778 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.318982 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.319232 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.319740 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:05Z","lastTransitionTime":"2026-02-02T10:54:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.354590 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-kdnnp"] Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.355895 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:05 crc kubenswrapper[4838]: E0202 10:54:05.356003 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.379576 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.399340 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.421087 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.423094 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.423316 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.423498 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.423678 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.423816 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:05Z","lastTransitionTime":"2026-02-02T10:54:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.439825 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.460860 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 03:35:21.935475042 +0000 UTC Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.460906 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.484458 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.497386 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hhdv\" (UniqueName: \"kubernetes.io/projected/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-kube-api-access-4hhdv\") pod \"network-metrics-daemon-kdnnp\" (UID: \"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\") " pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.497449 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs\") pod \"network-metrics-daemon-kdnnp\" (UID: \"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\") " pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.502426 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.505341 4838 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.505500 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:54:05 crc kubenswrapper[4838]: E0202 10:54:05.505715 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:54:05 crc kubenswrapper[4838]: E0202 10:54:05.505933 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.527705 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.527749 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.527766 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.527792 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.527810 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:05Z","lastTransitionTime":"2026-02-02T10:54:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.538469 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10e5588991d7a9353b9a8e675fca93bfac2b62e459b8e14a027eecc99e18f433\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcab7894fcb8b1055505140881ce36efa0cf71530b210c7a4cdeccad1f6b23f3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:01Z\\\",\\\"message\\\":\\\"pping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.267948 6129 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0202 10:54:01.268032 6129 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.268173 6129 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.268487 6129 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 10:54:01.268560 6129 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.268822 6129 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 10:54:01.268898 6129 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 10:54:01.268998 6129 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 10:54:01.269079 6129 factory.go:656] Stopping watch factory\\\\nI0202 10:54:01.269147 6129 ovnkube.go:599] Stopped ovnkube\\\\nI0202 
10\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10e5588991d7a9353b9a8e675fca93bfac2b62e459b8e14a027eecc99e18f433\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:04Z\\\",\\\"message\\\":\\\".go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 10:54:02.869873 6304 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 10:54:02.869897 6304 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 10:54:02.869907 6304 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 10:54:02.869944 6304 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 10:54:02.869967 6304 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 10:54:02.870068 6304 factory.go:656] Stopping watch factory\\\\nI0202 10:54:02.870092 6304 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 10:54:02.870110 6304 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 10:54:02.870125 6304 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 10:54:02.870140 6304 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 10:54:02.870154 6304 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 10:54:02.870169 6304 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 10:54:02.873737 6304 ovnkube.go:599] Stopped ovnkube\\\\nI0202 10:54:02.873807 6304 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 10:54:02.873919 6304 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:54:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.562554 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster
-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.583565 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed 
to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.599129 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hhdv\" (UniqueName: \"kubernetes.io/projected/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-kube-api-access-4hhdv\") pod \"network-metrics-daemon-kdnnp\" (UID: \"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\") " pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.599194 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs\") pod \"network-metrics-daemon-kdnnp\" (UID: \"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\") " pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:05 crc kubenswrapper[4838]: E0202 10:54:05.599513 4838 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 10:54:05 crc kubenswrapper[4838]: E0202 10:54:05.599609 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs podName:c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba nodeName:}" failed. No retries permitted until 2026-02-02 10:54:06.099585583 +0000 UTC m=+40.436686651 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs") pod "network-metrics-daemon-kdnnp" (UID: "c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.603109 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.628078 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b6b7df7a695cbe85f6c056fdaf425c3d965d0dcd612c0a98b05b9386d42294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.630590 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.630691 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:05 crc 
kubenswrapper[4838]: I0202 10:54:05.630713 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.630739 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.630759 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:05Z","lastTransitionTime":"2026-02-02T10:54:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.633121 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hhdv\" (UniqueName: \"kubernetes.io/projected/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-kube-api-access-4hhdv\") pod \"network-metrics-daemon-kdnnp\" (UID: \"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\") " pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.655383 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"202
6-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.677990 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\"
:\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.697474 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfa832ec-d3ca-4c0d-bef6-863867a95110\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4rcnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.718743 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kdnnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kdnnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.734083 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.734153 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.734177 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.734209 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.734232 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:05Z","lastTransitionTime":"2026-02-02T10:54:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.837189 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.837242 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.837260 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.837284 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.837307 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:05Z","lastTransitionTime":"2026-02-02T10:54:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.853821 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-66l9c_9bc00b9c-6e31-4f8e-b4ba-44150281ed69/ovnkube-controller/1.log" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.859807 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" event={"ID":"dfa832ec-d3ca-4c0d-bef6-863867a95110","Type":"ContainerStarted","Data":"bf49d48779cd69118d1a7fedcb7a6fe58fcd9d9dafaf70cdf9d3d61ce95d80a3"} Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.859880 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" event={"ID":"dfa832ec-d3ca-4c0d-bef6-863867a95110","Type":"ContainerStarted","Data":"4b6bb0c34e93fb1ae0885c96c1936812c8faf43e78eae2d53f429c322118857b"} Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.880934 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.902396 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.921668 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.940525 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.940591 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.940608 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.940701 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.940720 4838 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:05Z","lastTransitionTime":"2026-02-02T10:54:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.941565 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.961957 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:05 crc kubenswrapper[4838]: I0202 10:54:05.979908 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:05Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.002485 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b6b7df7a695cbe85f6c056fdaf425c3d965d0dcd612c0a98b05b9386d42294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:06Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.035866 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:06Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.042966 4838 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.042999 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.043010 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.043027 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.043038 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:06Z","lastTransitionTime":"2026-02-02T10:54:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.069083 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10e5588991d7a9353b9a8e675fca93bfac2b62e4
59b8e14a027eecc99e18f433\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcab7894fcb8b1055505140881ce36efa0cf71530b210c7a4cdeccad1f6b23f3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:01Z\\\",\\\"message\\\":\\\"pping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.267948 6129 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0202 10:54:01.268032 6129 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.268173 6129 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.268487 6129 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 10:54:01.268560 6129 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.268822 6129 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 10:54:01.268898 6129 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 10:54:01.268998 6129 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 10:54:01.269079 6129 factory.go:656] Stopping watch factory\\\\nI0202 10:54:01.269147 6129 ovnkube.go:599] Stopped ovnkube\\\\nI0202 10\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10e5588991d7a9353b9a8e675fca93bfac2b62e459b8e14a027eecc99e18f433\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:04Z\\\",\\\"message\\\":\\\".go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 10:54:02.869873 6304 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 10:54:02.869897 6304 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 10:54:02.869907 6304 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 10:54:02.869944 6304 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 10:54:02.869967 6304 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 10:54:02.870068 6304 factory.go:656] Stopping watch factory\\\\nI0202 10:54:02.870092 6304 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 10:54:02.870110 6304 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 10:54:02.870125 6304 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 10:54:02.870140 6304 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 10:54:02.870154 6304 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 10:54:02.870169 6304 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 10:54:02.873737 6304 ovnkube.go:599] Stopped ovnkube\\\\nI0202 10:54:02.873807 6304 metrics.go:553] 
Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 10:54:02.873919 6304 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:54:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":
\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:06Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.084886 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:06Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.097460 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:06Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.104275 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs\") pod \"network-metrics-daemon-kdnnp\" (UID: \"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\") " pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:06 crc kubenswrapper[4838]: E0202 10:54:06.104494 4838 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 10:54:06 crc kubenswrapper[4838]: E0202 10:54:06.104662 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs podName:c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba nodeName:}" failed. No retries permitted until 2026-02-02 10:54:07.104590788 +0000 UTC m=+41.441691856 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs") pod "network-metrics-daemon-kdnnp" (UID: "c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.111890 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:06Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.122535 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:06Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.141713 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:06Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.145332 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.145457 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.145481 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.145506 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.145525 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:06Z","lastTransitionTime":"2026-02-02T10:54:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.159557 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfa832ec-d3ca-4c0d-bef6-863867a95110\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b6bb0c34e93fb1ae0885c96c1936812c8faf43e78eae2d53f429c322118857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf49d48779cd69118d1a7fedcb7a6fe58fcd9d9dafaf70cdf9d3d61ce95d80a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4rcnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:06Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.174934 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kdnnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kdnnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:06Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.247992 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.248032 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.248043 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.248060 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.248071 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:06Z","lastTransitionTime":"2026-02-02T10:54:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.350908 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.350969 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.350988 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.351012 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.351033 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:06Z","lastTransitionTime":"2026-02-02T10:54:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.453257 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.453308 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.453320 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.453339 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.453352 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:06Z","lastTransitionTime":"2026-02-02T10:54:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.462676 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 07:25:54.434842762 +0000 UTC Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.505289 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:54:06 crc kubenswrapper[4838]: E0202 10:54:06.505408 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.522347 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:06Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.544228 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b6b7df7a695cbe85f6c056fdaf425c3d965d0dcd612c0a98b05b9386d42294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:06Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.555906 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.555971 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:06 crc 
kubenswrapper[4838]: I0202 10:54:06.555988 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.556012 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.556032 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:06Z","lastTransitionTime":"2026-02-02T10:54:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.560430 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\
\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:06Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.590451 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10e5588991d7a9353b9a8e675fca93bfac2b62e4
59b8e14a027eecc99e18f433\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcab7894fcb8b1055505140881ce36efa0cf71530b210c7a4cdeccad1f6b23f3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:01Z\\\",\\\"message\\\":\\\"pping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.267948 6129 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0202 10:54:01.268032 6129 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.268173 6129 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.268487 6129 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 10:54:01.268560 6129 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.268822 6129 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 10:54:01.268898 6129 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 10:54:01.268998 6129 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 10:54:01.269079 6129 factory.go:656] Stopping watch factory\\\\nI0202 10:54:01.269147 6129 ovnkube.go:599] Stopped ovnkube\\\\nI0202 10\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10e5588991d7a9353b9a8e675fca93bfac2b62e459b8e14a027eecc99e18f433\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:04Z\\\",\\\"message\\\":\\\".go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 10:54:02.869873 6304 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 10:54:02.869897 6304 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 10:54:02.869907 6304 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 10:54:02.869944 6304 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 10:54:02.869967 6304 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 10:54:02.870068 6304 factory.go:656] Stopping watch factory\\\\nI0202 10:54:02.870092 6304 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 10:54:02.870110 6304 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 10:54:02.870125 6304 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 10:54:02.870140 6304 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 10:54:02.870154 6304 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 10:54:02.870169 6304 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 10:54:02.873737 6304 ovnkube.go:599] Stopped ovnkube\\\\nI0202 10:54:02.873807 6304 metrics.go:553] 
Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 10:54:02.873919 6304 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:54:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":
\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:06Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.613536 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:06Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.633111 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:06Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.652581 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:06Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.658717 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.658770 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.658787 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.658809 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.658827 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:06Z","lastTransitionTime":"2026-02-02T10:54:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.675159 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"host
IP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:06Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.694668 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfa832ec-d3ca-4c0d-bef6-863867a95110\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b6bb0c34e93fb1ae0885c96c1936812c8faf43e78eae2d53f429c322118857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf49d48779cd69118d1a7fedcb7a6fe58fcd9d9dafaf70cdf9d3d61ce95d80a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPat
h\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4rcnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:06Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.715578 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kdnnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kdnnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:06Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.733884 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:06Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.752436 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:06Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.761030 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.761106 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.761130 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.761166 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.761190 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:06Z","lastTransitionTime":"2026-02-02T10:54:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.771466 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:06Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.796468 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:06Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.814246 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:06Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.831969 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:06Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.863197 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.863280 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.863301 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.863326 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.863343 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:06Z","lastTransitionTime":"2026-02-02T10:54:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.966528 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.966580 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.966600 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.966664 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:06 crc kubenswrapper[4838]: I0202 10:54:06.966690 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:06Z","lastTransitionTime":"2026-02-02T10:54:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.069773 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.069831 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.069848 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.069874 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.069892 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:07Z","lastTransitionTime":"2026-02-02T10:54:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.114378 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs\") pod \"network-metrics-daemon-kdnnp\" (UID: \"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\") " pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:07 crc kubenswrapper[4838]: E0202 10:54:07.114575 4838 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 10:54:07 crc kubenswrapper[4838]: E0202 10:54:07.114694 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs podName:c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba nodeName:}" failed. No retries permitted until 2026-02-02 10:54:09.11467179 +0000 UTC m=+43.451772858 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs") pod "network-metrics-daemon-kdnnp" (UID: "c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.172531 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.172588 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.172604 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.172663 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.172684 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:07Z","lastTransitionTime":"2026-02-02T10:54:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.276203 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.276511 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.276533 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.276560 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.276584 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:07Z","lastTransitionTime":"2026-02-02T10:54:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.379354 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.379408 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.379429 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.379459 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.379481 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:07Z","lastTransitionTime":"2026-02-02T10:54:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.463797 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 04:48:17.034457802 +0000 UTC Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.530858 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.530867 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:54:07 crc kubenswrapper[4838]: E0202 10:54:07.531079 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.530893 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:07 crc kubenswrapper[4838]: E0202 10:54:07.531238 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:54:07 crc kubenswrapper[4838]: E0202 10:54:07.531337 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.532875 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.532912 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.532925 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.532941 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.532955 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:07Z","lastTransitionTime":"2026-02-02T10:54:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.635742 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.635806 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.635827 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.635853 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.635871 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:07Z","lastTransitionTime":"2026-02-02T10:54:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.739116 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.739190 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.739212 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.739244 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.739266 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:07Z","lastTransitionTime":"2026-02-02T10:54:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.841521 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.841571 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.841587 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.841610 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.841663 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:07Z","lastTransitionTime":"2026-02-02T10:54:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.945251 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.945794 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.945999 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.946191 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:07 crc kubenswrapper[4838]: I0202 10:54:07.946375 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:07Z","lastTransitionTime":"2026-02-02T10:54:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.050371 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.050427 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.050443 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.050466 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.050482 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:08Z","lastTransitionTime":"2026-02-02T10:54:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.152835 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.153322 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.153488 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.153710 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.153923 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:08Z","lastTransitionTime":"2026-02-02T10:54:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.256800 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.256867 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.256884 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.256916 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.256933 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:08Z","lastTransitionTime":"2026-02-02T10:54:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.361119 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.361182 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.361201 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.361228 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.361247 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:08Z","lastTransitionTime":"2026-02-02T10:54:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.463972 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 16:39:34.065257963 +0000 UTC
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.464138 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.464180 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.464199 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.464222 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.464240 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:08Z","lastTransitionTime":"2026-02-02T10:54:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.505770 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:54:08 crc kubenswrapper[4838]: E0202 10:54:08.505952 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.567809 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.567894 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.567918 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.567953 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.567979 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:08Z","lastTransitionTime":"2026-02-02T10:54:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.670831 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.670890 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.670909 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.670935 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.670953 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:08Z","lastTransitionTime":"2026-02-02T10:54:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.773773 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.773844 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.773861 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.773888 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.773906 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:08Z","lastTransitionTime":"2026-02-02T10:54:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.876030 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.876068 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.876078 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.876093 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.876104 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:08Z","lastTransitionTime":"2026-02-02T10:54:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.978407 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.978448 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.978457 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.978474 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:08 crc kubenswrapper[4838]: I0202 10:54:08.978485 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:08Z","lastTransitionTime":"2026-02-02T10:54:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.081121 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.081191 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.081215 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.081246 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.081268 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:09Z","lastTransitionTime":"2026-02-02T10:54:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.151192 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs\") pod \"network-metrics-daemon-kdnnp\" (UID: \"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\") " pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:54:09 crc kubenswrapper[4838]: E0202 10:54:09.151462 4838 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Feb 02 10:54:09 crc kubenswrapper[4838]: E0202 10:54:09.151570 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs podName:c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba nodeName:}" failed. No retries permitted until 2026-02-02 10:54:13.151542782 +0000 UTC m=+47.488643840 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs") pod "network-metrics-daemon-kdnnp" (UID: "c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba") : object "openshift-multus"/"metrics-daemon-secret" not registered
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.184232 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.184285 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.184302 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.184325 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.184343 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:09Z","lastTransitionTime":"2026-02-02T10:54:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.286713 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.286767 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.286787 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.286810 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.286827 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:09Z","lastTransitionTime":"2026-02-02T10:54:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.390455 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.390531 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.390555 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.390583 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.390603 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:09Z","lastTransitionTime":"2026-02-02T10:54:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.464709 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 20:57:58.774099151 +0000 UTC
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.493324 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.493383 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.493400 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.493427 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.493446 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:09Z","lastTransitionTime":"2026-02-02T10:54:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.505671 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.505698 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.505684 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:54:09 crc kubenswrapper[4838]: E0202 10:54:09.505852 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:54:09 crc kubenswrapper[4838]: E0202 10:54:09.506046 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 10:54:09 crc kubenswrapper[4838]: E0202 10:54:09.506200 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.596326 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.596409 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.596432 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.596465 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.596487 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:09Z","lastTransitionTime":"2026-02-02T10:54:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.699603 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.699697 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.699714 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.699739 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.699764 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:09Z","lastTransitionTime":"2026-02-02T10:54:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.802740 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.802816 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.802838 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.802862 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.802880 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:09Z","lastTransitionTime":"2026-02-02T10:54:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.905528 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.905603 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.905663 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.905694 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:09 crc kubenswrapper[4838]: I0202 10:54:09.905716 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:09Z","lastTransitionTime":"2026-02-02T10:54:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.008111 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.008165 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.008186 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.008209 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.008226 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:10Z","lastTransitionTime":"2026-02-02T10:54:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.111580 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.111712 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.111752 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.111782 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.111802 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:10Z","lastTransitionTime":"2026-02-02T10:54:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.215281 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.215410 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.215435 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.215493 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.215511 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:10Z","lastTransitionTime":"2026-02-02T10:54:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.317533 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.317580 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.317590 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.317606 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.317632 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:10Z","lastTransitionTime":"2026-02-02T10:54:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.420313 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.420390 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.420408 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.420438 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.420459 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:10Z","lastTransitionTime":"2026-02-02T10:54:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.465174 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 20:34:01.396305801 +0000 UTC
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.505906 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:54:10 crc kubenswrapper[4838]: E0202 10:54:10.506058 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.522656 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.522718 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.522735 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.522757 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.522775 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:10Z","lastTransitionTime":"2026-02-02T10:54:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.626613 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.626753 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.626771 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.626800 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.626865 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:10Z","lastTransitionTime":"2026-02-02T10:54:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.729590 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.729697 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.729715 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.729773 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.729792 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:10Z","lastTransitionTime":"2026-02-02T10:54:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.834710 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.834788 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.834803 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.834824 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.834841 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:10Z","lastTransitionTime":"2026-02-02T10:54:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.937398 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.937486 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.937503 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.937557 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:10 crc kubenswrapper[4838]: I0202 10:54:10.937574 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:10Z","lastTransitionTime":"2026-02-02T10:54:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.040237 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.040305 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.040327 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.040354 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.040372 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:11Z","lastTransitionTime":"2026-02-02T10:54:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.142851 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.142920 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.142938 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.142963 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.142982 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:11Z","lastTransitionTime":"2026-02-02T10:54:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.245798 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.245853 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.245870 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.245894 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.245910 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:11Z","lastTransitionTime":"2026-02-02T10:54:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.349101 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.349164 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.349180 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.349206 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.349225 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:11Z","lastTransitionTime":"2026-02-02T10:54:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.452047 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.452095 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.452108 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.452124 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.452138 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:11Z","lastTransitionTime":"2026-02-02T10:54:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.465952 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 19:57:26.612203468 +0000 UTC
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.505062 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:54:11 crc kubenswrapper[4838]: E0202 10:54:11.505301 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.505077 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.505073 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:54:11 crc kubenswrapper[4838]: E0202 10:54:11.505461 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:54:11 crc kubenswrapper[4838]: E0202 10:54:11.505597 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.554811 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.554890 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.554912 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.554937 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.554956 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:11Z","lastTransitionTime":"2026-02-02T10:54:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.658232 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.658311 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.658330 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.658367 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.658389 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:11Z","lastTransitionTime":"2026-02-02T10:54:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.762388 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.762437 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.762450 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.762470 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.762482 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:11Z","lastTransitionTime":"2026-02-02T10:54:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.866188 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.866275 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.866294 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.866331 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.866356 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:11Z","lastTransitionTime":"2026-02-02T10:54:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.969811 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.969879 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.969897 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.969923 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:11 crc kubenswrapper[4838]: I0202 10:54:11.969941 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:11Z","lastTransitionTime":"2026-02-02T10:54:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.073074 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.073136 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.073157 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.073185 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.073205 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:12Z","lastTransitionTime":"2026-02-02T10:54:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.176104 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.176189 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.176213 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.176245 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.176268 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:12Z","lastTransitionTime":"2026-02-02T10:54:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.280390 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.280453 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.280470 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.280497 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.280515 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:12Z","lastTransitionTime":"2026-02-02T10:54:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.383854 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.383902 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.383919 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.383944 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.383961 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:12Z","lastTransitionTime":"2026-02-02T10:54:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.467043 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 16:04:26.501280047 +0000 UTC
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.486891 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.486918 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.486926 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.486941 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.486951 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:12Z","lastTransitionTime":"2026-02-02T10:54:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.505141 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:54:12 crc kubenswrapper[4838]: E0202 10:54:12.505307 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.590244 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.590295 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.590312 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.590335 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.590352 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:12Z","lastTransitionTime":"2026-02-02T10:54:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.693930 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.693986 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.694003 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.694029 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.694047 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:12Z","lastTransitionTime":"2026-02-02T10:54:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.797360 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.797426 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.797449 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.797480 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.797502 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:12Z","lastTransitionTime":"2026-02-02T10:54:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.900670 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.900736 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.900756 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.900785 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:12 crc kubenswrapper[4838]: I0202 10:54:12.900806 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:12Z","lastTransitionTime":"2026-02-02T10:54:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.004421 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.004492 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.004519 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.004549 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.004570 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:13Z","lastTransitionTime":"2026-02-02T10:54:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.108039 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.108141 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.108195 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.108220 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.108238 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:13Z","lastTransitionTime":"2026-02-02T10:54:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.134525 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.134595 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.134613 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.134669 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.134693 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:13Z","lastTransitionTime":"2026-02-02T10:54:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:13 crc kubenswrapper[4838]: E0202 10:54:13.154772 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:13Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.160659 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.160735 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
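[editor's note] The node-status patch above is rejected because the node.network-node-identity.openshift.io webhook's serving certificate expired on 2025-08-24, well before the current time of 2026-02-02. A minimal sketch of reproducing the kubelet-side check from the affected host: dial the endpoint (address taken from the error above), skip chain verification so the handshake completes, and compare the leaf certificate's validity window against the current time. This is a diagnostic sketch, not part of kubelet.

```go
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Endpoint taken from the webhook error above; it only resolves on
	// the affected node itself.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true, // inspect the cert even though verification fails
	})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()

	certs := conn.ConnectionState().PeerCertificates
	if len(certs) == 0 {
		fmt.Println("no peer certificates presented")
		return
	}
	leaf := certs[0]
	fmt.Printf("NotBefore=%s NotAfter=%s\n", leaf.NotBefore, leaf.NotAfter)
	if now := time.Now(); now.After(leaf.NotAfter) {
		// Matches the failure mode in the log: current time is after NotAfter.
		fmt.Println("certificate has expired")
	}
}
```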
event="NodeHasNoDiskPressure" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.160764 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.160794 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.160816 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:13Z","lastTransitionTime":"2026-02-02T10:54:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:13 crc kubenswrapper[4838]: E0202 10:54:13.179748 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:13Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.183916 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.183993 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.184017 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.184052 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.184071 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:13Z","lastTransitionTime":"2026-02-02T10:54:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.198996 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs\") pod \"network-metrics-daemon-kdnnp\" (UID: \"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\") " pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:13 crc kubenswrapper[4838]: E0202 10:54:13.199312 4838 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 10:54:13 crc kubenswrapper[4838]: E0202 10:54:13.199680 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs podName:c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba nodeName:}" failed. No retries permitted until 2026-02-02 10:54:21.19958888 +0000 UTC m=+55.536689938 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs") pod "network-metrics-daemon-kdnnp" (UID: "c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 10:54:13 crc kubenswrapper[4838]: E0202 10:54:13.202810 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:13Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.208068 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.208131 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.208169 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.208201 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.208226 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:13Z","lastTransitionTime":"2026-02-02T10:54:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:13 crc kubenswrapper[4838]: E0202 10:54:13.222255 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:13Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.227123 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.227173 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.227186 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.227202 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.227214 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:13Z","lastTransitionTime":"2026-02-02T10:54:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:13 crc kubenswrapper[4838]: E0202 10:54:13.245063 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:13Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:13 crc kubenswrapper[4838]: E0202 10:54:13.245279 4838 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.247203 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.247254 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.247270 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.247293 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.247309 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:13Z","lastTransitionTime":"2026-02-02T10:54:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.358152 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.358663 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.358707 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.358739 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.358763 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:13Z","lastTransitionTime":"2026-02-02T10:54:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.461866 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.461942 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.461966 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.461998 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.462016 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:13Z","lastTransitionTime":"2026-02-02T10:54:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.467266 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 01:01:54.53759253 +0000 UTC Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.505238 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.505288 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.505332 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:13 crc kubenswrapper[4838]: E0202 10:54:13.505826 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:54:13 crc kubenswrapper[4838]: E0202 10:54:13.505951 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba" Feb 02 10:54:13 crc kubenswrapper[4838]: E0202 10:54:13.505855 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.564197 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.564262 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.564280 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.564314 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.564337 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:13Z","lastTransitionTime":"2026-02-02T10:54:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.667140 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.667444 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.667527 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.667629 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.667725 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:13Z","lastTransitionTime":"2026-02-02T10:54:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.771205 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.771285 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.771311 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.771347 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.771370 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:13Z","lastTransitionTime":"2026-02-02T10:54:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.874927 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.875019 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.875038 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.875072 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.875095 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:13Z","lastTransitionTime":"2026-02-02T10:54:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.978394 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.978466 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.978483 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.978547 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:13 crc kubenswrapper[4838]: I0202 10:54:13.978572 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:13Z","lastTransitionTime":"2026-02-02T10:54:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.081918 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.082007 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.082032 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.082061 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.082083 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:14Z","lastTransitionTime":"2026-02-02T10:54:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.185179 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.185240 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.185262 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.185371 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.185400 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:14Z","lastTransitionTime":"2026-02-02T10:54:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.288028 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.288094 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.288115 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.288143 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.288167 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:14Z","lastTransitionTime":"2026-02-02T10:54:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.391048 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.391122 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.391146 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.391247 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.391277 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:14Z","lastTransitionTime":"2026-02-02T10:54:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.467406 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 23:59:58.993271558 +0000 UTC Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.494254 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.494323 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.494347 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.494373 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.494397 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:14Z","lastTransitionTime":"2026-02-02T10:54:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.505091 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:54:14 crc kubenswrapper[4838]: E0202 10:54:14.505288 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.598251 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.598332 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.598367 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.598398 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.598421 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:14Z","lastTransitionTime":"2026-02-02T10:54:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.701215 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.701275 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.701292 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.701317 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.701333 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:14Z","lastTransitionTime":"2026-02-02T10:54:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.804780 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.804901 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.804922 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.804945 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.804965 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:14Z","lastTransitionTime":"2026-02-02T10:54:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.906743 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.906806 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.906828 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.906858 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:14 crc kubenswrapper[4838]: I0202 10:54:14.906879 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:14Z","lastTransitionTime":"2026-02-02T10:54:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.009555 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.009658 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.009684 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.009716 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.009740 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:15Z","lastTransitionTime":"2026-02-02T10:54:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.113754 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.113820 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.113838 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.113862 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.113880 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:15Z","lastTransitionTime":"2026-02-02T10:54:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.217007 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.217063 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.217083 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.217106 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.217124 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:15Z","lastTransitionTime":"2026-02-02T10:54:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.320960 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.321026 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.321046 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.321073 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.321093 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:15Z","lastTransitionTime":"2026-02-02T10:54:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.430816 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.430869 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.430887 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.430909 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.430926 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:15Z","lastTransitionTime":"2026-02-02T10:54:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.467869 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 12:39:31.66376041 +0000 UTC Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.505607 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.505900 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.505904 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:54:15 crc kubenswrapper[4838]: E0202 10:54:15.506225 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:54:15 crc kubenswrapper[4838]: E0202 10:54:15.506386 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba" Feb 02 10:54:15 crc kubenswrapper[4838]: E0202 10:54:15.506596 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.533538 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.533588 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.533605 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.533658 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.533702 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:15Z","lastTransitionTime":"2026-02-02T10:54:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.637416 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.637491 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.637514 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.637543 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.637566 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:15Z","lastTransitionTime":"2026-02-02T10:54:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.740382 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.740439 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.740457 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.740482 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.740500 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:15Z","lastTransitionTime":"2026-02-02T10:54:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.843550 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.843604 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.843686 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.843721 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.843742 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:15Z","lastTransitionTime":"2026-02-02T10:54:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.947028 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.947104 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.947122 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.947148 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:15 crc kubenswrapper[4838]: I0202 10:54:15.947167 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:15Z","lastTransitionTime":"2026-02-02T10:54:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.050042 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.050102 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.050121 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.050145 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.050162 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:16Z","lastTransitionTime":"2026-02-02T10:54:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.154512 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.154593 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.154651 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.154684 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.154705 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:16Z","lastTransitionTime":"2026-02-02T10:54:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.257895 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.257979 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.258006 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.258038 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.258066 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:16Z","lastTransitionTime":"2026-02-02T10:54:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.362165 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.362268 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.362321 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.362415 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.362436 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:16Z","lastTransitionTime":"2026-02-02T10:54:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.466226 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.466286 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.466300 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.466323 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.466342 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:16Z","lastTransitionTime":"2026-02-02T10:54:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.468507 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 18:50:06.825433252 +0000 UTC
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.505230 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:54:16 crc kubenswrapper[4838]: E0202 10:54:16.505838 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.506366 4838 scope.go:117] "RemoveContainer" containerID="10e5588991d7a9353b9a8e675fca93bfac2b62e459b8e14a027eecc99e18f433"
Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.526245 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:16Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.553159 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:16Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.571133 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.571305 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.571324 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.571343 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.571355 4838 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:16Z","lastTransitionTime":"2026-02-02T10:54:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.574830 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:16Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.590563 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:16Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.604078 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:16Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.619829 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b6b7df7a695cbe85f6c056fdaf425c3d965d0dcd612c0a98b05b9386d42294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:16Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.634086 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:16Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.658057 4838 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10e5588991d7a9353b9a8e675fca93bfac2b62e459b8e14a027eecc99e18f433\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcab7894fcb8b1055505140881ce36efa0cf71530b210c7a4cdeccad1f6b23f3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:01Z\\\",\\\"message\\\":\\\"pping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.267948 6129 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0202 10:54:01.268032 6129 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.268173 6129 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.268487 6129 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 10:54:01.268560 6129 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:01.268822 6129 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 10:54:01.268898 6129 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 10:54:01.268998 6129 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 10:54:01.269079 6129 factory.go:656] Stopping watch factory\\\\nI0202 10:54:01.269147 6129 ovnkube.go:599] Stopped ovnkube\\\\nI0202 10\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10e5588991d7a9353b9a8e675fca93bfac2b62e459b8e14a027eecc99e18f433\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:04Z\\\",\\\"message\\\":\\\".go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 
10:54:02.869873 6304 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 10:54:02.869897 6304 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 10:54:02.869907 6304 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 10:54:02.869944 6304 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 10:54:02.869967 6304 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 10:54:02.870068 6304 factory.go:656] Stopping watch factory\\\\nI0202 10:54:02.870092 6304 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 10:54:02.870110 6304 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 10:54:02.870125 6304 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 10:54:02.870140 6304 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 10:54:02.870154 6304 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 10:54:02.870169 6304 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 10:54:02.873737 6304 ovnkube.go:599] Stopped ovnkube\\\\nI0202 10:54:02.873807 6304 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 10:54:02.873919 6304 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:54:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\\\",\\\"image\\\":\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:16Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.675031 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.675174 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.675201 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.675394 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.675426 4838 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:16Z","lastTransitionTime":"2026-02-02T10:54:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.677001 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\
":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:16Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.693292 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:16Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.710347 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:16Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.725259 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:16Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.744332 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:16Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.760325 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfa832ec-d3ca-4c0d-bef6-863867a95110\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b6bb0c34e93fb1ae0885c96c1936812c8faf43e78eae2d53f429c322118857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf49d48779cd69118d1a7fedcb7a6fe58fcd9d9dafaf70cdf9d3d61ce95d80a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4rcnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:16Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.772203 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kdnnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kdnnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:16Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.778846 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.778877 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.778887 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.778904 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.778915 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:16Z","lastTransitionTime":"2026-02-02T10:54:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.785278 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:16Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.803480 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:16Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.821521 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:16Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.839106 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:16Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.866437 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:16Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.880836 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.880881 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.880892 4838 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.880908 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.880922 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:16Z","lastTransitionTime":"2026-02-02T10:54:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.884916 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:16Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.906981 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:16Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.913230 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-66l9c_9bc00b9c-6e31-4f8e-b4ba-44150281ed69/ovnkube-controller/1.log" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.921312 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerStarted","Data":"7d27d5c95df96b16d529337161ad976e8d55208625b2dd109e127944108db0bc"} Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.921462 4838 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.925294 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:16Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.945870 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:16Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.967815 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b6b7df7a695cbe85f6c056fdaf425c3d965d0dcd612c0a98b05b9386d42294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:16Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.984350 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.984402 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:16 crc 
kubenswrapper[4838]: I0202 10:54:16.984418 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.984439 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.984454 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:16Z","lastTransitionTime":"2026-02-02T10:54:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:16 crc kubenswrapper[4838]: I0202 10:54:16.986183 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\
\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:16Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.009749 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10e5588991d7a9353b9a8e675fca93bfac2b62e4
59b8e14a027eecc99e18f433\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10e5588991d7a9353b9a8e675fca93bfac2b62e459b8e14a027eecc99e18f433\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:04Z\\\",\\\"message\\\":\\\".go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 10:54:02.869873 6304 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 10:54:02.869897 6304 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 10:54:02.869907 6304 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 10:54:02.869944 6304 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 10:54:02.869967 6304 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 10:54:02.870068 6304 factory.go:656] Stopping watch factory\\\\nI0202 10:54:02.870092 6304 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 10:54:02.870110 6304 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 10:54:02.870125 6304 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 10:54:02.870140 6304 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 10:54:02.870154 6304 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 10:54:02.870169 6304 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 10:54:02.873737 6304 ovnkube.go:599] Stopped ovnkube\\\\nI0202 10:54:02.873807 6304 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 10:54:02.873919 6304 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:54:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-66l9c_openshift-ovn-kubernetes(9bc00b9c-6e31-4f8e-b4ba-44150281ed69)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:17Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.027687 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:17Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.041467 4838 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:17Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.054332 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfa832ec-d3ca-4c0d-bef6-863867a95110\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b6bb0c34e93fb1ae0885c96c1936812c8faf43e78eae2d53f429c322118857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf49d48779cd69118d1a7fedcb7a6fe58fcd9d9dafaf70cdf9d3d61ce95d80a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4rcnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:17Z is after 2025-08-24T17:21:41Z" Feb 02 
10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.067244 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kdnnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kdnnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:17Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.080824 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:17Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.086653 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.086688 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.086698 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.086714 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.086724 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:17Z","lastTransitionTime":"2026-02-02T10:54:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.093543 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:17Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.111753 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:17Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.129767 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:17Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.146037 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:17Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.160231 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:17Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.174277 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:17Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.185206 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-02-02T10:54:17Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.189245 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.189374 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.189457 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.189550 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.189654 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:17Z","lastTransitionTime":"2026-02-02T10:54:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.198066 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10
:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:17Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.212756 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b6b7df7a695cbe85f6c056fdaf425c3d965d0dcd612c0a98b05b9386d42294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\
\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"cont
ainerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:17Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.226726 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:17Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.247700 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d27d5c95df96b16d529337161ad976e8d55208625b2dd109e127944108db0bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10e5588991d7a9353b9a8e675fca93bfac2b62e459b8e14a027eecc99e18f433\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:04Z\\\",\\\"message\\\":\\\".go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 10:54:02.869873 6304 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 10:54:02.869897 6304 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 10:54:02.869907 6304 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 10:54:02.869944 6304 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 10:54:02.869967 6304 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 10:54:02.870068 6304 factory.go:656] Stopping watch factory\\\\nI0202 10:54:02.870092 6304 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 10:54:02.870110 6304 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 10:54:02.870125 6304 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 10:54:02.870140 6304 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 10:54:02.870154 6304 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 10:54:02.870169 6304 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 10:54:02.873737 6304 ovnkube.go:599] Stopped ovnkube\\\\nI0202 10:54:02.873807 6304 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 10:54:02.873919 6304 ovnkube.go:137] failed to run ovnkube: [failed to start network 
controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:54:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\"
:[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:17Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.268937 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:17Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.282136 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:17Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.292165 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.292210 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.292222 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.292240 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.292252 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:17Z","lastTransitionTime":"2026-02-02T10:54:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.295156 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfa832ec-d3ca-4c0d-bef6-863867a95110\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b6bb0c34e93fb1ae0885c96c1936812c8faf43e78eae2d53f429c322118857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf49d48779cd69118d1a7fedcb7a6fe58fcd9d9dafaf70cdf9d3d61ce95d80a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4rcnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:17Z is after 2025-08-24T17:21:41Z"
Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.309180 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kdnnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kdnnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:17Z is after 2025-08-24T17:21:41Z"
Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.323082 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168
.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:17Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.394444 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.394482 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.394493 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.394508 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.394519 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:17Z","lastTransitionTime":"2026-02-02T10:54:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.469259 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 21:39:44.616490151 +0000 UTC Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.496511 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.496574 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.496584 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.496629 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.496639 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:17Z","lastTransitionTime":"2026-02-02T10:54:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.505021 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.505070 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.505106 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:54:17 crc kubenswrapper[4838]: E0202 10:54:17.505203 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:54:17 crc kubenswrapper[4838]: E0202 10:54:17.505294 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba" Feb 02 10:54:17 crc kubenswrapper[4838]: E0202 10:54:17.505357 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.599840 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.599879 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.599887 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.599904 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.599915 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:17Z","lastTransitionTime":"2026-02-02T10:54:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.703278 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.703351 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.703368 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.703394 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.703413 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:17Z","lastTransitionTime":"2026-02-02T10:54:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.806965 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.807029 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.807049 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.807075 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.807093 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:17Z","lastTransitionTime":"2026-02-02T10:54:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.910213 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.910277 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.910294 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.910321 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.910337 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:17Z","lastTransitionTime":"2026-02-02T10:54:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.928419 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-66l9c_9bc00b9c-6e31-4f8e-b4ba-44150281ed69/ovnkube-controller/2.log" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.929224 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-66l9c_9bc00b9c-6e31-4f8e-b4ba-44150281ed69/ovnkube-controller/1.log" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.934013 4838 generic.go:334] "Generic (PLEG): container finished" podID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerID="7d27d5c95df96b16d529337161ad976e8d55208625b2dd109e127944108db0bc" exitCode=1 Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.934074 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerDied","Data":"7d27d5c95df96b16d529337161ad976e8d55208625b2dd109e127944108db0bc"} Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.934128 4838 scope.go:117] "RemoveContainer" containerID="10e5588991d7a9353b9a8e675fca93bfac2b62e459b8e14a027eecc99e18f433" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.936196 4838 scope.go:117] "RemoveContainer" containerID="7d27d5c95df96b16d529337161ad976e8d55208625b2dd109e127944108db0bc" Feb 02 10:54:17 crc kubenswrapper[4838]: E0202 10:54:17.936528 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-66l9c_openshift-ovn-kubernetes(9bc00b9c-6e31-4f8e-b4ba-44150281ed69)\"" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.958208 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:17Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.975154 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfa832ec-d3ca-4c0d-bef6-863867a95110\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b6bb0c34e93fb1ae0885c96c1936812c8faf43e78eae2d53f429c322118857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf49d48779cd69118d1a7fedcb7a6fe58fcd9d9dafaf70cdf9d3d61ce95d80a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4rcnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:17Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:17 crc kubenswrapper[4838]: I0202 10:54:17.992115 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kdnnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kdnnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:17Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.013107 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.013933 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.013989 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.014008 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.014033 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.014049 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:18Z","lastTransitionTime":"2026-02-02T10:54:18Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.034604 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z"
Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.053589 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c"
Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.054908 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z"
Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.074883 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z"
Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.094208 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z"
Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.107045 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.116767 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.116816 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.116834 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.116862 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.116880 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:18Z","lastTransitionTime":"2026-02-02T10:54:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.118609 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z"
Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.121784 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"]
Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.140891 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b6b7df7a695cbe85f6c056fdaf425c3d965d0dcd612c0a98b05b9386d42294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z"
Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.155554 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.174838 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://047d0
e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d27d5c95df96b16d529337161ad976e8d55208625b2dd109e127944108db0bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10e5588991d7a9353b9a8e675fca93bfac2b62e459b8e14a027eecc99e18f433\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:04Z\\\",\\\"message\\\":\\\".go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 10:54:02.869873 6304 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 10:54:02.869897 6304 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 10:54:02.869907 6304 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 10:54:02.869944 6304 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 10:54:02.869967 6304 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 10:54:02.870068 6304 factory.go:656] Stopping watch factory\\\\nI0202 10:54:02.870092 6304 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 10:54:02.870110 6304 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 10:54:02.870125 6304 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 10:54:02.870140 6304 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 10:54:02.870154 6304 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 10:54:02.870169 6304 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 10:54:02.873737 6304 ovnkube.go:599] Stopped ovnkube\\\\nI0202 10:54:02.873807 6304 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 10:54:02.873919 6304 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:54:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d27d5c95df96b16d529337161ad976e8d55208625b2dd109e127944108db0bc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:17Z\\\",\\\"message\\\":\\\"ving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0077f672b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:machine-mtrc,Protocol:TCP,Port:8441,TargetPort:{1 0 machine-mtrc},NodePort:0,AppProtocol:nil,},ServicePort{Name:machineset-mtrc,Protocol:TCP,Port:8442,TargetPort:{1 0 machineset-mtrc},NodePort:0,AppProtocol:nil,},ServicePort{Name:mhc-mtrc,Protocol:TCP,Port:8444,TargetPort:{1 0 mhc-mtrc},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{k8s-app: controller,},ClusterIP:10.217.4.167,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.167],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0202 10:54:17.353894 6508 services_controller.go:451] Built service openshift-machine-api/machine-api-operator-webhook cluster-wide 
L\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:54:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209
9482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.194194 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluste
r-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 
1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.209856 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed 
to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.219793 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.219854 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.219871 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.219898 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.219914 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:18Z","lastTransitionTime":"2026-02-02T10:54:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.226579 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.242742 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.260319 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.279760 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.296057 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.317557 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.322762 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.322801 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.322814 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.322835 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.322849 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:18Z","lastTransitionTime":"2026-02-02T10:54:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.332366 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.348855 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05f6144-f55e-4e08-9104-3730000613bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bae30f81dbb217dd45987bdd5ba01a01d13b8c602153c734f27336412c5a397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9265e112e7858143bc9067c7d9b1d00cab82fc64a1e306c175d620d699c94a36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ee92ab7d9ad96ff606c9e549e3d99a3602fd91c10a4ec7ebeb07932825d521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51090d7e9383392f3daab78599cfd0aa3871d6d89d39dcf4c45af7441328b672\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51090d7e9383392f3daab78599cfd0aa3871d6d89d39dcf4c45af7441328b672\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.362665 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z" Feb 02 
10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.375456 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.390785 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b6b7df7a695cbe85f6c056fdaf425c3d965d0dcd612c0a98b05b9386d42294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.407535 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.425690 4838 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.425751 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.425777 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.425806 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.425893 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:18Z","lastTransitionTime":"2026-02-02T10:54:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.435417 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d27d5c95df96b16d529337161ad976e8d552086
25b2dd109e127944108db0bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10e5588991d7a9353b9a8e675fca93bfac2b62e459b8e14a027eecc99e18f433\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:04Z\\\",\\\"message\\\":\\\".go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 10:54:02.869873 6304 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 10:54:02.869897 6304 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 10:54:02.869907 6304 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 10:54:02.869944 6304 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 10:54:02.869967 6304 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 10:54:02.870068 6304 factory.go:656] Stopping watch factory\\\\nI0202 10:54:02.870092 6304 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 10:54:02.870110 6304 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 10:54:02.870125 6304 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 10:54:02.870140 6304 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 10:54:02.870154 6304 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 10:54:02.870169 6304 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 10:54:02.873737 6304 ovnkube.go:599] Stopped ovnkube\\\\nI0202 10:54:02.873807 6304 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 10:54:02.873919 6304 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:54:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d27d5c95df96b16d529337161ad976e8d55208625b2dd109e127944108db0bc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:17Z\\\",\\\"message\\\":\\\"ving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0077f672b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:machine-mtrc,Protocol:TCP,Port:8441,TargetPort:{1 0 machine-mtrc},NodePort:0,AppProtocol:nil,},ServicePort{Name:machineset-mtrc,Protocol:TCP,Port:8442,TargetPort:{1 0 machineset-mtrc},NodePort:0,AppProtocol:nil,},ServicePort{Name:mhc-mtrc,Protocol:TCP,Port:8444,TargetPort:{1 0 mhc-mtrc},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{k8s-app: 
controller,},ClusterIP:10.217.4.167,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.167],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0202 10:54:17.353894 6508 services_controller.go:451] Built service openshift-machine-api/machine-api-operator-webhook cluster-wide L\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:54:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"
/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.460124 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.469436 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 22:42:16.955399946 +0000 UTC Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.478133 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.495455 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfa832ec-d3ca-4c0d-bef6-863867a95110\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b6bb0c34e93fb1ae0885c96c1936812c8faf43e78eae2d53f429c322118857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf49d48779cd69118d1a7fedcb7a6fe58fcd9d9dafaf70cdf9d3d61ce95d80a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4rcnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z" Feb 02 
10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.505348 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:54:18 crc kubenswrapper[4838]: E0202 10:54:18.505522 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.510832 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kdnnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kdnnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.525225 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.527839 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.527871 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.527882 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.527898 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.527910 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:18Z","lastTransitionTime":"2026-02-02T10:54:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.538247 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.630635 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.630682 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.630696 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.630714 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.630727 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:18Z","lastTransitionTime":"2026-02-02T10:54:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.733832 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.733894 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.733911 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.733940 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.733959 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:18Z","lastTransitionTime":"2026-02-02T10:54:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.836706 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.836794 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.836811 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.836835 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.836851 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:18Z","lastTransitionTime":"2026-02-02T10:54:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.939212 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.939298 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.939324 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.939356 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.939383 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:18Z","lastTransitionTime":"2026-02-02T10:54:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.940440 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-66l9c_9bc00b9c-6e31-4f8e-b4ba-44150281ed69/ovnkube-controller/2.log" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.945059 4838 scope.go:117] "RemoveContainer" containerID="7d27d5c95df96b16d529337161ad976e8d55208625b2dd109e127944108db0bc" Feb 02 10:54:18 crc kubenswrapper[4838]: E0202 10:54:18.945317 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-66l9c_openshift-ovn-kubernetes(9bc00b9c-6e31-4f8e-b4ba-44150281ed69)\"" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.962436 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 
2025-08-24T17:21:41Z" Feb 02 10:54:18 crc kubenswrapper[4838]: I0202 10:54:18.983853 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b6b7df7a695cbe85f6c056fdaf425c3d965d0dcd612c0a98b05b9386d42294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\
\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\
\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.001742 4838 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:18Z is after 2025-08-24T17:21:41Z" Feb 02 
10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.042510 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.042555 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.042572 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.042596 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.042613 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:19Z","lastTransitionTime":"2026-02-02T10:54:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.049180 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d27d5c95df96b16d529337161ad976e8d552086
25b2dd109e127944108db0bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d27d5c95df96b16d529337161ad976e8d55208625b2dd109e127944108db0bc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:17Z\\\",\\\"message\\\":\\\"ving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0077f672b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:machine-mtrc,Protocol:TCP,Port:8441,TargetPort:{1 0 machine-mtrc},NodePort:0,AppProtocol:nil,},ServicePort{Name:machineset-mtrc,Protocol:TCP,Port:8442,TargetPort:{1 0 machineset-mtrc},NodePort:0,AppProtocol:nil,},ServicePort{Name:mhc-mtrc,Protocol:TCP,Port:8444,TargetPort:{1 0 mhc-mtrc},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{k8s-app: controller,},ClusterIP:10.217.4.167,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.167],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0202 10:54:17.353894 6508 services_controller.go:451] Built service openshift-machine-api/machine-api-operator-webhook cluster-wide L\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:54:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-66l9c_openshift-ovn-kubernetes(9bc00b9c-6e31-4f8e-b4ba-44150281ed69)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:19Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.086993 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:19Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.103269 4838 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05f6144-f55e-4e08-9104-3730000613bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bae30f81dbb217dd45987bdd5ba01a01d13b8c602153c734f27336412c5a397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9265e112e7858143bc9067c7d9b1d00cab82fc64a1e306c175d620d699c94a36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ee92ab7d9ad96ff606c9e549e3d99a3602fd91c10a4ec7ebeb07932825d521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containe
rID\\\":\\\"cri-o://51090d7e9383392f3daab78599cfd0aa3871d6d89d39dcf4c45af7441328b672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51090d7e9383392f3daab78599cfd0aa3871d6d89d39dcf4c45af7441328b672\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:19Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.122311 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired 
or is not yet valid: current time 2026-02-02T10:54:19Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.135008 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:19Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.145215 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.145270 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.145288 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.145314 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 
10:54:19.145347 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:19Z","lastTransitionTime":"2026-02-02T10:54:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.150790 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\
\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:19Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.165467 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfa832ec-d3ca-4c0d-bef6-863867a95110\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b6bb0c34e93fb1ae0885c96c1936812c8faf43e78eae2d53f429c322118857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf49d48779cd69118d1a7fedcb7a6fe58fcd9d9dafaf70cdf9d3d61ce95d80a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\
\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4rcnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:19Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.179884 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kdnnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kdnnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:19Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.200162 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:19Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.220681 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:19Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.240559 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:19Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.248432 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.248482 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.248500 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.248523 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.248541 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:19Z","lastTransitionTime":"2026-02-02T10:54:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.263111 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:19Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.271656 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.271828 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.271918 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:54:19 crc kubenswrapper[4838]: E0202 10:54:19.272071 4838 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 10:54:19 crc kubenswrapper[4838]: E0202 10:54:19.272147 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 10:54:51.272127974 +0000 UTC m=+85.609229042 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 10:54:19 crc kubenswrapper[4838]: E0202 10:54:19.272230 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:54:51.272218797 +0000 UTC m=+85.609319865 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:54:19 crc kubenswrapper[4838]: E0202 10:54:19.272280 4838 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 10:54:19 crc kubenswrapper[4838]: E0202 10:54:19.272318 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 10:54:51.272306739 +0000 UTC m=+85.609407797 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.283582 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:19Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.304591 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:19Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.351011 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.351038 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.351047 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.351059 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.351067 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:19Z","lastTransitionTime":"2026-02-02T10:54:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.372505 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.372537 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:54:19 crc kubenswrapper[4838]: E0202 10:54:19.372663 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 10:54:19 crc kubenswrapper[4838]: E0202 10:54:19.372678 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 10:54:19 crc kubenswrapper[4838]: E0202 10:54:19.372688 4838 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 10:54:19 crc kubenswrapper[4838]: E0202 10:54:19.372730 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 10:54:51.372717914 +0000 UTC m=+85.709818942 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 10:54:19 crc kubenswrapper[4838]: E0202 10:54:19.372878 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 10:54:19 crc kubenswrapper[4838]: E0202 10:54:19.372937 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 10:54:19 crc kubenswrapper[4838]: E0202 10:54:19.372968 4838 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 10:54:19 crc kubenswrapper[4838]: E0202 10:54:19.373098 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 10:54:51.373053683 +0000 UTC m=+85.710154761 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.453408 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.453464 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.453481 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.453506 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.453524 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:19Z","lastTransitionTime":"2026-02-02T10:54:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.470113 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 12:14:07.122315247 +0000 UTC Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.505813 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.505827 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.505962 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:19 crc kubenswrapper[4838]: E0202 10:54:19.506148 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:54:19 crc kubenswrapper[4838]: E0202 10:54:19.506348 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:54:19 crc kubenswrapper[4838]: E0202 10:54:19.506451 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.557093 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.557160 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.557185 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.557217 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.557272 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:19Z","lastTransitionTime":"2026-02-02T10:54:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.660941 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.661055 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.661074 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.661867 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.661957 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:19Z","lastTransitionTime":"2026-02-02T10:54:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.764770 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.764841 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.764865 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.764893 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.764913 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:19Z","lastTransitionTime":"2026-02-02T10:54:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.867382 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.867454 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.867470 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.867495 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.867512 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:19Z","lastTransitionTime":"2026-02-02T10:54:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.970337 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.970388 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.970402 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.970420 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:19 crc kubenswrapper[4838]: I0202 10:54:19.970433 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:19Z","lastTransitionTime":"2026-02-02T10:54:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.073741 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.073785 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.073803 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.073821 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.073834 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:20Z","lastTransitionTime":"2026-02-02T10:54:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.176421 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.176455 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.176464 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.176477 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.176486 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:20Z","lastTransitionTime":"2026-02-02T10:54:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.279000 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.279051 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.279067 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.279085 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.279099 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:20Z","lastTransitionTime":"2026-02-02T10:54:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.382451 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.382650 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.382674 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.382697 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.382714 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:20Z","lastTransitionTime":"2026-02-02T10:54:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.471377 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 01:44:41.43724106 +0000 UTC Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.485590 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.485685 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.485710 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.485739 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.485761 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:20Z","lastTransitionTime":"2026-02-02T10:54:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.505010 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:54:20 crc kubenswrapper[4838]: E0202 10:54:20.505234 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.588945 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.589077 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.589108 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.589136 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.589158 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:20Z","lastTransitionTime":"2026-02-02T10:54:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.692768 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.692838 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.692862 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.692897 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.692925 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:20Z","lastTransitionTime":"2026-02-02T10:54:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.795455 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.795595 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.795666 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.795695 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.795714 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:20Z","lastTransitionTime":"2026-02-02T10:54:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.898573 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.898703 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.898724 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.898748 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:20 crc kubenswrapper[4838]: I0202 10:54:20.898764 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:20Z","lastTransitionTime":"2026-02-02T10:54:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.002199 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.002262 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.002282 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.002308 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.002324 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:21Z","lastTransitionTime":"2026-02-02T10:54:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.105463 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.105517 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.105535 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.105562 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.105581 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:21Z","lastTransitionTime":"2026-02-02T10:54:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.208515 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.208582 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.208600 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.208648 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.208669 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:21Z","lastTransitionTime":"2026-02-02T10:54:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.293348 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs\") pod \"network-metrics-daemon-kdnnp\" (UID: \"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\") " pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:21 crc kubenswrapper[4838]: E0202 10:54:21.293509 4838 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 10:54:21 crc kubenswrapper[4838]: E0202 10:54:21.293593 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs podName:c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba nodeName:}" failed. No retries permitted until 2026-02-02 10:54:37.293574631 +0000 UTC m=+71.630675659 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs") pod "network-metrics-daemon-kdnnp" (UID: "c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.311116 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.311180 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.311203 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.311234 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.311258 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:21Z","lastTransitionTime":"2026-02-02T10:54:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.414290 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.414359 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.414376 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.414401 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.414419 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:21Z","lastTransitionTime":"2026-02-02T10:54:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.471805 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 09:46:34.360996399 +0000 UTC Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.505255 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.505255 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:54:21 crc kubenswrapper[4838]: E0202 10:54:21.505434 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:54:21 crc kubenswrapper[4838]: E0202 10:54:21.505510 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.505284 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:21 crc kubenswrapper[4838]: E0202 10:54:21.505666 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.518611 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.518704 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.518722 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.518748 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.518769 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:21Z","lastTransitionTime":"2026-02-02T10:54:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.621229 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.621298 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.621316 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.621339 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.621357 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:21Z","lastTransitionTime":"2026-02-02T10:54:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.724016 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.724091 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.724116 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.724215 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.724280 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:21Z","lastTransitionTime":"2026-02-02T10:54:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.827277 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.827340 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.827357 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.827381 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.827397 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:21Z","lastTransitionTime":"2026-02-02T10:54:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.930086 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.930156 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.930175 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.930197 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:21 crc kubenswrapper[4838]: I0202 10:54:21.930214 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:21Z","lastTransitionTime":"2026-02-02T10:54:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.032735 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.032778 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.032789 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.032806 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.032818 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:22Z","lastTransitionTime":"2026-02-02T10:54:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.135467 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.135510 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.135553 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.135570 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.135580 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:22Z","lastTransitionTime":"2026-02-02T10:54:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.239255 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.239339 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.239364 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.239394 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.239414 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:22Z","lastTransitionTime":"2026-02-02T10:54:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.342273 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.342322 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.342338 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.342360 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.342376 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:22Z","lastTransitionTime":"2026-02-02T10:54:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.445605 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.445689 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.445706 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.445730 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.445748 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:22Z","lastTransitionTime":"2026-02-02T10:54:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.472608 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 04:26:53.422686931 +0000 UTC Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.505730 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:54:22 crc kubenswrapper[4838]: E0202 10:54:22.505928 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.548729 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.548794 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.548817 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.548846 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.548870 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:22Z","lastTransitionTime":"2026-02-02T10:54:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.653202 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.653329 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.653354 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.653385 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.653407 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:22Z","lastTransitionTime":"2026-02-02T10:54:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.756051 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.756101 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.756115 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.756133 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.756149 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:22Z","lastTransitionTime":"2026-02-02T10:54:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.859104 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.859173 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.859193 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.859221 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.859240 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:22Z","lastTransitionTime":"2026-02-02T10:54:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.961507 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.961597 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.961669 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.961703 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:22 crc kubenswrapper[4838]: I0202 10:54:22.961728 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:22Z","lastTransitionTime":"2026-02-02T10:54:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.064135 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.064205 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.064217 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.064255 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.064267 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:23Z","lastTransitionTime":"2026-02-02T10:54:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.167340 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.167403 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.167421 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.167444 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.167461 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:23Z","lastTransitionTime":"2026-02-02T10:54:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.271261 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.271345 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.271368 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.271403 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.271427 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:23Z","lastTransitionTime":"2026-02-02T10:54:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.375212 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.375290 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.375311 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.375345 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.375365 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:23Z","lastTransitionTime":"2026-02-02T10:54:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.438805 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.438874 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.438900 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.438928 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.438948 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:23Z","lastTransitionTime":"2026-02-02T10:54:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:23 crc kubenswrapper[4838]: E0202 10:54:23.459510 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:23Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.464576 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.464672 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.464691 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.464715 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.464733 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:23Z","lastTransitionTime":"2026-02-02T10:54:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.473997 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 06:05:54.297478651 +0000 UTC Feb 02 10:54:23 crc kubenswrapper[4838]: E0202 10:54:23.484293 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:23Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.489819 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.489894 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.489920 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.489949 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.489969 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:23Z","lastTransitionTime":"2026-02-02T10:54:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.505164 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.505199 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.505168 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:54:23 crc kubenswrapper[4838]: E0202 10:54:23.505373 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba" Feb 02 10:54:23 crc kubenswrapper[4838]: E0202 10:54:23.505479 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:54:23 crc kubenswrapper[4838]: E0202 10:54:23.505572 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:54:23 crc kubenswrapper[4838]: E0202 10:54:23.509423 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:23Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.514344 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.514404 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.514426 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.514452 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.514472 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:23Z","lastTransitionTime":"2026-02-02T10:54:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:23 crc kubenswrapper[4838]: E0202 10:54:23.534362 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:23Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.539738 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.539791 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.539802 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.539819 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.539830 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:23Z","lastTransitionTime":"2026-02-02T10:54:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:23 crc kubenswrapper[4838]: E0202 10:54:23.558257 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:23Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:23 crc kubenswrapper[4838]: E0202 10:54:23.558918 4838 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.565994 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.566036 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.566054 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.566124 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.566144 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:23Z","lastTransitionTime":"2026-02-02T10:54:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.669903 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.669953 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.669971 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.669993 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.670008 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:23Z","lastTransitionTime":"2026-02-02T10:54:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.773572 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.773672 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.773696 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.773724 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.773745 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:23Z","lastTransitionTime":"2026-02-02T10:54:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.876305 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.876378 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.876397 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.876423 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.876440 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:23Z","lastTransitionTime":"2026-02-02T10:54:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.979869 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.979926 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.979943 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.979967 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:23 crc kubenswrapper[4838]: I0202 10:54:23.979985 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:23Z","lastTransitionTime":"2026-02-02T10:54:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.082536 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.082611 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.082683 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.082710 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.082727 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:24Z","lastTransitionTime":"2026-02-02T10:54:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.185209 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.185256 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.185264 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.185277 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.185287 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:24Z","lastTransitionTime":"2026-02-02T10:54:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.287893 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.287933 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.287943 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.287957 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.287965 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:24Z","lastTransitionTime":"2026-02-02T10:54:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.391338 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.391419 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.391445 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.391476 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.391499 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:24Z","lastTransitionTime":"2026-02-02T10:54:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.474985 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 01:53:31.893037096 +0000 UTC Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.495211 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.495303 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.495330 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.495367 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.495388 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:24Z","lastTransitionTime":"2026-02-02T10:54:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.505957 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:54:24 crc kubenswrapper[4838]: E0202 10:54:24.506160 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.599099 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.599168 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.599185 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.599208 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.599224 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:24Z","lastTransitionTime":"2026-02-02T10:54:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.702016 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.702098 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.702160 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.702241 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.702271 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:24Z","lastTransitionTime":"2026-02-02T10:54:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.805031 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.805107 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.805127 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.805150 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.805169 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:24Z","lastTransitionTime":"2026-02-02T10:54:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.908166 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.908219 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.908234 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.908254 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:24 crc kubenswrapper[4838]: I0202 10:54:24.908268 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:24Z","lastTransitionTime":"2026-02-02T10:54:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.012027 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.012108 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.012133 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.012166 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.012188 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:25Z","lastTransitionTime":"2026-02-02T10:54:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.114903 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.114970 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.114988 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.115015 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.115032 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:25Z","lastTransitionTime":"2026-02-02T10:54:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.218047 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.218134 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.218153 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.218198 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.218236 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:25Z","lastTransitionTime":"2026-02-02T10:54:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.321548 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.321656 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.321676 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.321701 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.321720 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:25Z","lastTransitionTime":"2026-02-02T10:54:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.424854 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.424913 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.424929 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.424952 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.424969 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:25Z","lastTransitionTime":"2026-02-02T10:54:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.475785 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 07:33:09.711482128 +0000 UTC Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.505786 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.505862 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:54:25 crc kubenswrapper[4838]: E0202 10:54:25.505959 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.505806 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:54:25 crc kubenswrapper[4838]: E0202 10:54:25.506155 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:54:25 crc kubenswrapper[4838]: E0202 10:54:25.506268 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.528020 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.528073 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.528090 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.528114 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.528132 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:25Z","lastTransitionTime":"2026-02-02T10:54:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.631328 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.631397 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.631415 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.631440 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.631458 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:25Z","lastTransitionTime":"2026-02-02T10:54:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.734739 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.734821 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.734840 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.734863 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.734879 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:25Z","lastTransitionTime":"2026-02-02T10:54:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.838682 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.838746 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.838767 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.838803 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.838823 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:25Z","lastTransitionTime":"2026-02-02T10:54:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.941865 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.941948 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.941982 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.942013 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:25 crc kubenswrapper[4838]: I0202 10:54:25.942044 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:25Z","lastTransitionTime":"2026-02-02T10:54:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.045729 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.045810 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.045834 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.045865 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.046149 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:26Z","lastTransitionTime":"2026-02-02T10:54:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.149546 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.149604 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.149649 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.149674 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.149690 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:26Z","lastTransitionTime":"2026-02-02T10:54:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.252998 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.253063 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.253079 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.253102 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.253121 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:26Z","lastTransitionTime":"2026-02-02T10:54:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.356266 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.356313 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.356336 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.356363 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.356384 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:26Z","lastTransitionTime":"2026-02-02T10:54:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.460070 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.460397 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.460414 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.460436 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.460454 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:26Z","lastTransitionTime":"2026-02-02T10:54:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.476763 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 07:51:00.491865665 +0000 UTC Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.505474 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:54:26 crc kubenswrapper[4838]: E0202 10:54:26.505666 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.526989 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:26Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.550343 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:26Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.562718 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.562779 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.562801 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.562830 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.562851 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:26Z","lastTransitionTime":"2026-02-02T10:54:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.570660 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:26Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.589847 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:26Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.610704 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:26Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.630399 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:26Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.646131 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:26Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.664754 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.665217 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.665423 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.665678 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.665856 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:26Z","lastTransitionTime":"2026-02-02T10:54:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.675369 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d27d5c95df96b16d529337161ad976e8d55208625b2dd109e127944108db0bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d27d5c95df96b16d529337161ad976e8d55208625b2dd109e127944108db0bc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:17Z\\\",\\\"message\\\":\\\"ving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0077f672b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:machine-mtrc,Protocol:TCP,Port:8441,TargetPort:{1 0 machine-mtrc},NodePort:0,AppProtocol:nil,},ServicePort{Name:machineset-mtrc,Protocol:TCP,Port:8442,TargetPort:{1 0 machineset-mtrc},NodePort:0,AppProtocol:nil,},ServicePort{Name:mhc-mtrc,Protocol:TCP,Port:8444,TargetPort:{1 0 mhc-mtrc},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{k8s-app: controller,},ClusterIP:10.217.4.167,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.167],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0202 10:54:17.353894 6508 services_controller.go:451] Built service openshift-machine-api/machine-api-operator-webhook cluster-wide L\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:54:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-66l9c_openshift-ovn-kubernetes(9bc00b9c-6e31-4f8e-b4ba-44150281ed69)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:26Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.692860 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:26Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.706558 4838 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05f6144-f55e-4e08-9104-3730000613bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bae30f81dbb217dd45987bdd5ba01a01d13b8c602153c734f27336412c5a397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9265e112e7858143bc9067c7d9b1d00cab82fc64a1e306c175d620d699c94a36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ee92ab7d9ad96ff606c9e549e3d99a3602fd91c10a4ec7ebeb07932825d521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containe
rID\\\":\\\"cri-o://51090d7e9383392f3daab78599cfd0aa3871d6d89d39dcf4c45af7441328b672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51090d7e9383392f3daab78599cfd0aa3871d6d89d39dcf4c45af7441328b672\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:26Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.720535 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired 
or is not yet valid: current time 2026-02-02T10:54:26Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.736255 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:26Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.752907 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b6b7df7a695cbe85f6c056fdaf425c3d965d0dcd612c0a98b05b9386d42294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:26Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.769112 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:26Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.770645 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.770693 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.770710 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.770736 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.770755 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:26Z","lastTransitionTime":"2026-02-02T10:54:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.791495 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"host
IP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:26Z is after 2025-08-24T17:21:41Z"
Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.808413 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfa832ec-d3ca-4c0d-bef6-863867a95110\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b6bb0c34e93fb1ae0885c96c1936812c8faf43e78eae2d53f429c322118857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf49d48779cd69118d1a7fedcb7a6fe58fcd9d9dafaf70cdf9d3d61ce95d80a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4rcnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:26Z is after 2025-08-24T17:21:41Z"
Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.824323 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kdnnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kdnnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:26Z is after 2025-08-24T17:21:41Z"
Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.873966 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.874025 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.874043 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.874068 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.874086 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:26Z","lastTransitionTime":"2026-02-02T10:54:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.976315 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.976365 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.976379 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.976401 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:26 crc kubenswrapper[4838]: I0202 10:54:26.976414 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:26Z","lastTransitionTime":"2026-02-02T10:54:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.079822 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.080338 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.080419 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.080538 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.080647 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:27Z","lastTransitionTime":"2026-02-02T10:54:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.183554 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.183598 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.183609 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.183655 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.183671 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:27Z","lastTransitionTime":"2026-02-02T10:54:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.286688 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.286744 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.286761 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.286783 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.286798 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:27Z","lastTransitionTime":"2026-02-02T10:54:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.389939 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.390008 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.390032 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.390059 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.390083 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:27Z","lastTransitionTime":"2026-02-02T10:54:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.477591 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 10:43:16.476023499 +0000 UTC
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.493573 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.493642 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.493660 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.493682 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.493699 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:27Z","lastTransitionTime":"2026-02-02T10:54:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.505764 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.505847 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:54:27 crc kubenswrapper[4838]: E0202 10:54:27.505960 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 10:54:27 crc kubenswrapper[4838]: E0202 10:54:27.506068 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.506568 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:54:27 crc kubenswrapper[4838]: E0202 10:54:27.506820 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.596523 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.596591 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.596605 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.596650 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.596664 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:27Z","lastTransitionTime":"2026-02-02T10:54:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.699306 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.699355 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.699366 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.699385 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.699398 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:27Z","lastTransitionTime":"2026-02-02T10:54:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.802306 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.802381 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.802400 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.802430 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.802456 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:27Z","lastTransitionTime":"2026-02-02T10:54:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.905799 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.905846 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.905905 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.905926 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:27 crc kubenswrapper[4838]: I0202 10:54:27.905935 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:27Z","lastTransitionTime":"2026-02-02T10:54:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.008538 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.008591 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.008600 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.008636 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.008646 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:28Z","lastTransitionTime":"2026-02-02T10:54:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.111125 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.111183 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.111201 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.111231 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.111254 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:28Z","lastTransitionTime":"2026-02-02T10:54:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.214970 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.215023 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.215040 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.215063 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.215080 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:28Z","lastTransitionTime":"2026-02-02T10:54:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.317803 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.317868 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.317885 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.317910 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.317931 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:28Z","lastTransitionTime":"2026-02-02T10:54:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.421171 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.421220 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.421235 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.421257 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.421279 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:28Z","lastTransitionTime":"2026-02-02T10:54:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.478040 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 19:31:23.403064726 +0000 UTC
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.508186 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:54:28 crc kubenswrapper[4838]: E0202 10:54:28.508319 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.523963 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.523991 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.524000 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.524013 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.524023 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:28Z","lastTransitionTime":"2026-02-02T10:54:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.627737 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.627789 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.627805 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.627828 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.627844 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:28Z","lastTransitionTime":"2026-02-02T10:54:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.730837 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.730893 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.730912 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.730936 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.730953 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:28Z","lastTransitionTime":"2026-02-02T10:54:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.833429 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.833481 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.833499 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.833521 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.833537 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:28Z","lastTransitionTime":"2026-02-02T10:54:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.937226 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.937271 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.937286 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.937308 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:28 crc kubenswrapper[4838]: I0202 10:54:28.937324 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:28Z","lastTransitionTime":"2026-02-02T10:54:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.040075 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.040185 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.040204 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.040228 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.040246 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:29Z","lastTransitionTime":"2026-02-02T10:54:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.143030 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.143104 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.143115 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.143129 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.143140 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:29Z","lastTransitionTime":"2026-02-02T10:54:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.246230 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.246275 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.246287 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.246305 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.246317 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:29Z","lastTransitionTime":"2026-02-02T10:54:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.349151 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.349226 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.349250 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.349281 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.349302 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:29Z","lastTransitionTime":"2026-02-02T10:54:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.452181 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.452249 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.452271 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.452299 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.452321 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:29Z","lastTransitionTime":"2026-02-02T10:54:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.478525 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 11:35:25.277238646 +0000 UTC
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.505819 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:54:29 crc kubenswrapper[4838]: E0202 10:54:29.505997 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.506314 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:54:29 crc kubenswrapper[4838]: E0202 10:54:29.506424 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.506663 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:54:29 crc kubenswrapper[4838]: E0202 10:54:29.506769 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.555096 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.555138 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.555149 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.555164 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.555175 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:29Z","lastTransitionTime":"2026-02-02T10:54:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.658045 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.658091 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.658107 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.658129 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.658146 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:29Z","lastTransitionTime":"2026-02-02T10:54:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.760892 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.761187 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.761282 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.761398 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.761534 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:29Z","lastTransitionTime":"2026-02-02T10:54:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.864448 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.864851 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.865472 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.865713 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.865914 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:29Z","lastTransitionTime":"2026-02-02T10:54:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.969485 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.969542 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.969555 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.969574 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:29 crc kubenswrapper[4838]: I0202 10:54:29.969587 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:29Z","lastTransitionTime":"2026-02-02T10:54:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.072677 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.072735 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.072748 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.072766 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.072780 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:30Z","lastTransitionTime":"2026-02-02T10:54:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.175993 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.176258 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.176347 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.176434 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.176510 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:30Z","lastTransitionTime":"2026-02-02T10:54:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.279925 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.279973 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.279989 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.280012 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.280029 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:30Z","lastTransitionTime":"2026-02-02T10:54:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.383310 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.383596 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.383703 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.383802 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.383888 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:30Z","lastTransitionTime":"2026-02-02T10:54:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.478944 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 02:46:31.905336632 +0000 UTC
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.487237 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.487292 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.487309 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.487332 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.487349 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:30Z","lastTransitionTime":"2026-02-02T10:54:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.505246 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:54:30 crc kubenswrapper[4838]: E0202 10:54:30.505414 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.507047 4838 scope.go:117] "RemoveContainer" containerID="7d27d5c95df96b16d529337161ad976e8d55208625b2dd109e127944108db0bc"
Feb 02 10:54:30 crc kubenswrapper[4838]: E0202 10:54:30.507554 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-66l9c_openshift-ovn-kubernetes(9bc00b9c-6e31-4f8e-b4ba-44150281ed69)\"" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.590317 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.590521 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.590592 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.590709 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.590785 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:30Z","lastTransitionTime":"2026-02-02T10:54:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.693101 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.693151 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.693169 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.693191 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.693207 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:30Z","lastTransitionTime":"2026-02-02T10:54:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.795719 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.795783 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.795800 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.795825 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.795841 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:30Z","lastTransitionTime":"2026-02-02T10:54:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.898567 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.898971 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.899319 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.899470 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:30 crc kubenswrapper[4838]: I0202 10:54:30.899601 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:30Z","lastTransitionTime":"2026-02-02T10:54:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.002160 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.002211 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.002228 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.002246 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.002257 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:31Z","lastTransitionTime":"2026-02-02T10:54:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.104378 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.104711 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.104855 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.104985 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.105131 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:31Z","lastTransitionTime":"2026-02-02T10:54:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.207596 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.207664 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.207677 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.207695 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.207708 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:31Z","lastTransitionTime":"2026-02-02T10:54:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.309381 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.309427 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.309439 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.309456 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.309467 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:31Z","lastTransitionTime":"2026-02-02T10:54:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.411938 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.411993 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.412009 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.412036 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.412053 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:31Z","lastTransitionTime":"2026-02-02T10:54:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.479668 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 01:09:22.236804166 +0000 UTC
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.505148 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.505177 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:54:31 crc kubenswrapper[4838]: E0202 10:54:31.505288 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba"
Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.505647 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:54:31 crc kubenswrapper[4838]: E0202 10:54:31.505680 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:54:31 crc kubenswrapper[4838]: E0202 10:54:31.506113 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.515158 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.515203 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.515220 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.515243 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.515261 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:31Z","lastTransitionTime":"2026-02-02T10:54:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.617288 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.617338 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.617354 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.617376 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.617392 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:31Z","lastTransitionTime":"2026-02-02T10:54:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.719268 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.719297 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.719305 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.719316 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.719325 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:31Z","lastTransitionTime":"2026-02-02T10:54:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.821278 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.821321 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.821336 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.821356 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.821372 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:31Z","lastTransitionTime":"2026-02-02T10:54:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.923178 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.923208 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.923221 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.923236 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:31 crc kubenswrapper[4838]: I0202 10:54:31.923248 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:31Z","lastTransitionTime":"2026-02-02T10:54:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.025960 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.026026 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.026038 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.026063 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.026080 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:32Z","lastTransitionTime":"2026-02-02T10:54:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.129187 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.129241 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.129275 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.129310 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.129322 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:32Z","lastTransitionTime":"2026-02-02T10:54:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.232292 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.232342 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.232351 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.232368 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.232379 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:32Z","lastTransitionTime":"2026-02-02T10:54:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.334944 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.334992 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.335001 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.335022 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.335034 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:32Z","lastTransitionTime":"2026-02-02T10:54:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.437589 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.437694 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.437707 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.437739 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.437755 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:32Z","lastTransitionTime":"2026-02-02T10:54:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.480138 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 00:12:09.497398564 +0000 UTC Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.505456 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:54:32 crc kubenswrapper[4838]: E0202 10:54:32.505656 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.539900 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.539945 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.539957 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.539975 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.539988 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:32Z","lastTransitionTime":"2026-02-02T10:54:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.642753 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.642808 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.642828 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.642849 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.642866 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:32Z","lastTransitionTime":"2026-02-02T10:54:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.745228 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.745298 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.745319 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.745344 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.745362 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:32Z","lastTransitionTime":"2026-02-02T10:54:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.848516 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.848589 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.848611 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.848664 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.848682 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:32Z","lastTransitionTime":"2026-02-02T10:54:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.951353 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.951409 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.951426 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.951450 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:32 crc kubenswrapper[4838]: I0202 10:54:32.951466 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:32Z","lastTransitionTime":"2026-02-02T10:54:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.054061 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.054122 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.054140 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.054165 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.054182 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:33Z","lastTransitionTime":"2026-02-02T10:54:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.156190 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.156243 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.156256 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.156274 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.156287 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:33Z","lastTransitionTime":"2026-02-02T10:54:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.258830 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.258889 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.258905 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.258929 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.258947 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:33Z","lastTransitionTime":"2026-02-02T10:54:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.363770 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.363818 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.363837 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.363859 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.363877 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:33Z","lastTransitionTime":"2026-02-02T10:54:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.466160 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.466198 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.466209 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.466224 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.466235 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:33Z","lastTransitionTime":"2026-02-02T10:54:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.480744 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 16:47:58.735911905 +0000 UTC Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.505196 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:54:33 crc kubenswrapper[4838]: E0202 10:54:33.505318 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.505416 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:54:33 crc kubenswrapper[4838]: E0202 10:54:33.505600 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.505682 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:33 crc kubenswrapper[4838]: E0202 10:54:33.505743 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.568531 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.568558 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.568569 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.568583 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.568599 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:33Z","lastTransitionTime":"2026-02-02T10:54:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.671197 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.671260 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.671274 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.671292 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.671306 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:33Z","lastTransitionTime":"2026-02-02T10:54:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.773576 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.773643 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.773653 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.773669 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.773682 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:33Z","lastTransitionTime":"2026-02-02T10:54:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.876424 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.876535 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.876555 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.876578 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.876601 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:33Z","lastTransitionTime":"2026-02-02T10:54:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.913602 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.913683 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.913717 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.913738 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.913754 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:33Z","lastTransitionTime":"2026-02-02T10:54:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:33 crc kubenswrapper[4838]: E0202 10:54:33.925566 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:33Z is after 
2025-08-24T17:21:41Z"
Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.929398 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.929450 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.929470 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.929492 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.929508 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:33Z","lastTransitionTime":"2026-02-02T10:54:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:33 crc kubenswrapper[4838]: E0202 10:54:33.941948 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:33Z is after 
2025-08-24T17:21:41Z" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.945311 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.945360 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.945376 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.945399 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.945519 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:33Z","lastTransitionTime":"2026-02-02T10:54:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:33 crc kubenswrapper[4838]: E0202 10:54:33.962123 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:33Z is after 
2025-08-24T17:21:41Z" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.966466 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.966501 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.966510 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.966526 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.966536 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:33Z","lastTransitionTime":"2026-02-02T10:54:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:33 crc kubenswrapper[4838]: E0202 10:54:33.984281 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:33Z is after 
2025-08-24T17:21:41Z" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.987939 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.987980 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.987996 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.988018 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:33 crc kubenswrapper[4838]: I0202 10:54:33.988033 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:33Z","lastTransitionTime":"2026-02-02T10:54:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:34 crc kubenswrapper[4838]: E0202 10:54:34.000139 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:33Z is after 
2025-08-24T17:21:41Z" Feb 02 10:54:34 crc kubenswrapper[4838]: E0202 10:54:34.000300 4838 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.001524 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.001557 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.001568 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.001585 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.001597 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:34Z","lastTransitionTime":"2026-02-02T10:54:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.103912 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.103939 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.103965 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.103978 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.103988 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:34Z","lastTransitionTime":"2026-02-02T10:54:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.206390 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.206428 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.206446 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.206469 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.206487 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:34Z","lastTransitionTime":"2026-02-02T10:54:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.309178 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.309254 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.309278 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.309309 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.309331 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:34Z","lastTransitionTime":"2026-02-02T10:54:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.411721 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.411778 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.411795 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.411819 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.411835 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:34Z","lastTransitionTime":"2026-02-02T10:54:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.481729 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 16:30:14.16800709 +0000 UTC Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.505464 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:54:34 crc kubenswrapper[4838]: E0202 10:54:34.505651 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.514007 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.514069 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.514085 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.514113 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.514130 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:34Z","lastTransitionTime":"2026-02-02T10:54:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.616037 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.616097 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.616115 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.616140 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.616161 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:34Z","lastTransitionTime":"2026-02-02T10:54:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.718170 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.718215 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.718227 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.718246 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.718258 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:34Z","lastTransitionTime":"2026-02-02T10:54:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.820130 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.820170 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.820179 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.820196 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.820207 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:34Z","lastTransitionTime":"2026-02-02T10:54:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.923004 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.923042 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.923050 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.923064 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:34 crc kubenswrapper[4838]: I0202 10:54:34.923073 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:34Z","lastTransitionTime":"2026-02-02T10:54:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.025142 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.025185 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.025196 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.025211 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.025221 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:35Z","lastTransitionTime":"2026-02-02T10:54:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.127834 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.127876 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.127888 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.127904 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.127916 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:35Z","lastTransitionTime":"2026-02-02T10:54:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.230114 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.230165 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.230183 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.230220 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.230240 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:35Z","lastTransitionTime":"2026-02-02T10:54:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.332483 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.332526 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.332542 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.332567 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.332582 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:35Z","lastTransitionTime":"2026-02-02T10:54:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.435565 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.435607 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.435637 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.435653 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.435663 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:35Z","lastTransitionTime":"2026-02-02T10:54:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.482582 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 13:11:00.224360232 +0000 UTC
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.505214 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.505475 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:54:35 crc kubenswrapper[4838]: E0202 10:54:35.505651 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.505727 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:54:35 crc kubenswrapper[4838]: E0202 10:54:35.505858 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:54:35 crc kubenswrapper[4838]: E0202 10:54:35.506034 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.538095 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.538125 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.538138 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.538151 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.538162 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:35Z","lastTransitionTime":"2026-02-02T10:54:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.640830 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.640917 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.640932 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.640949 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.640960 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:35Z","lastTransitionTime":"2026-02-02T10:54:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.743345 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.743381 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.743390 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.743405 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.743414 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:35Z","lastTransitionTime":"2026-02-02T10:54:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.845260 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.845296 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.845307 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.845321 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.845330 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:35Z","lastTransitionTime":"2026-02-02T10:54:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.948835 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.948897 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.948913 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.948935 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:35 crc kubenswrapper[4838]: I0202 10:54:35.948949 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:35Z","lastTransitionTime":"2026-02-02T10:54:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.051274 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.051332 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.051350 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.051374 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.051392 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:36Z","lastTransitionTime":"2026-02-02T10:54:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.154088 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.154143 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.154160 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.154183 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.154202 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:36Z","lastTransitionTime":"2026-02-02T10:54:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.256567 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.256608 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.256637 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.256655 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.256667 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:36Z","lastTransitionTime":"2026-02-02T10:54:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.358806 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.358844 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.358854 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.358867 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.358876 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:36Z","lastTransitionTime":"2026-02-02T10:54:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.460818 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.460865 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.460881 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.460904 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.460922 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:36Z","lastTransitionTime":"2026-02-02T10:54:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.483358 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 03:50:06.271001004 +0000 UTC
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.505847 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:54:36 crc kubenswrapper[4838]: E0202 10:54:36.506054 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.525284 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:36Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.540597 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:36Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.556835 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:36Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.563176 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.563216 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.563226 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.563242 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.563253 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:36Z","lastTransitionTime":"2026-02-02T10:54:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.570139 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:36Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.583246 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:36Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.599921 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b6b7df7a695cbe85f6c056fdaf425c3d965d0dcd612c0a98b05b9386d42294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:36Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.610771 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:36Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.629083 4838 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d27d5c95df96b16d529337161ad976e8d55208625b2dd109e127944108db0bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d27d5c95df96b16d529337161ad976e8d55208625b2dd109e127944108db0bc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:17Z\\\",\\\"message\\\":\\\"ving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0077f672b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:machine-mtrc,Protocol:TCP,Port:8441,TargetPort:{1 0 machine-mtrc},NodePort:0,AppProtocol:nil,},ServicePort{Name:machineset-mtrc,Protocol:TCP,Port:8442,TargetPort:{1 0 machineset-mtrc},NodePort:0,AppProtocol:nil,},ServicePort{Name:mhc-mtrc,Protocol:TCP,Port:8444,TargetPort:{1 0 mhc-mtrc},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{k8s-app: controller,},ClusterIP:10.217.4.167,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.167],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0202 10:54:17.353894 6508 services_controller.go:451] Built service openshift-machine-api/machine-api-operator-webhook cluster-wide L\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:54:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-66l9c_openshift-ovn-kubernetes(9bc00b9c-6e31-4f8e-b4ba-44150281ed69)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:36Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.643187 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:36Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.652071 4838 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05f6144-f55e-4e08-9104-3730000613bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bae30f81dbb217dd45987bdd5ba01a01d13b8c602153c734f27336412c5a397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9265e112e7858143bc9067c7d9b1d00cab82fc64a1e306c175d620d699c94a36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ee92ab7d9ad96ff606c9e549e3d99a3602fd91c10a4ec7ebeb07932825d521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containe
rID\\\":\\\"cri-o://51090d7e9383392f3daab78599cfd0aa3871d6d89d39dcf4c45af7441328b672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51090d7e9383392f3daab78599cfd0aa3871d6d89d39dcf4c45af7441328b672\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:36Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.664974 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.665014 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.665023 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.665038 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.665047 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:36Z","lastTransitionTime":"2026-02-02T10:54:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.665104 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:36Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.674959 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:36Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.683097 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:36Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.692638 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:36Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.700909 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfa832ec-d3ca-4c0d-bef6-863867a95110\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b6bb0c34e93fb1ae0885c96c1936812c8faf43e78eae2d53f429c322118857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf49d48779cd69118d1a7fedcb7a6fe58fcd9d9dafaf70cdf9d3d61ce95d80a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4rcnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:36Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.710467 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kdnnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kdnnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:36Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.721325 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:36Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.767134 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.767171 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.767182 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.767198 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.767210 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:36Z","lastTransitionTime":"2026-02-02T10:54:36Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.869742 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.869783 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.869793 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.869807 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.869818 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:36Z","lastTransitionTime":"2026-02-02T10:54:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.971722 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.971747 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.971757 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.971770 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:36 crc kubenswrapper[4838]: I0202 10:54:36.971780 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:36Z","lastTransitionTime":"2026-02-02T10:54:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.074414 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.074629 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.074720 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.074817 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.074900 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:37Z","lastTransitionTime":"2026-02-02T10:54:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.177795 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.177833 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.177841 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.177856 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.177865 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:37Z","lastTransitionTime":"2026-02-02T10:54:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.279951 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.280088 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.280168 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.280267 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.280345 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:37Z","lastTransitionTime":"2026-02-02T10:54:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.362150 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs\") pod \"network-metrics-daemon-kdnnp\" (UID: \"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\") " pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:37 crc kubenswrapper[4838]: E0202 10:54:37.362377 4838 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 10:54:37 crc kubenswrapper[4838]: E0202 10:54:37.362453 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs podName:c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba nodeName:}" failed. No retries permitted until 2026-02-02 10:55:09.362432619 +0000 UTC m=+103.699533657 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs") pod "network-metrics-daemon-kdnnp" (UID: "c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.383162 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.383203 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.383221 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.383241 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.383256 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:37Z","lastTransitionTime":"2026-02-02T10:54:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.484036 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 15:06:35.270353649 +0000 UTC
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.485508 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.485540 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.485551 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.485566 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.485577 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:37Z","lastTransitionTime":"2026-02-02T10:54:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.505085 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.505208 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:54:37 crc kubenswrapper[4838]: E0202 10:54:37.505392 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba"
Feb 02 10:54:37 crc kubenswrapper[4838]: E0202 10:54:37.505251 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.505085 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:54:37 crc kubenswrapper[4838]: E0202 10:54:37.505727 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.588820 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.588878 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.588895 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.588917 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.588934 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:37Z","lastTransitionTime":"2026-02-02T10:54:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.692141 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.692197 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.692206 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.692224 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.692235 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:37Z","lastTransitionTime":"2026-02-02T10:54:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.794319 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.794360 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.794373 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.794390 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.794404 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:37Z","lastTransitionTime":"2026-02-02T10:54:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.897547 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.897643 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.897662 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.897729 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.897747 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:37Z","lastTransitionTime":"2026-02-02T10:54:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.999640 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.999690 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.999701 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.999719 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:37 crc kubenswrapper[4838]: I0202 10:54:37.999730 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:37Z","lastTransitionTime":"2026-02-02T10:54:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.102539 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.102576 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.102585 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.102600 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.102608 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:38Z","lastTransitionTime":"2026-02-02T10:54:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.205154 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.205465 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.205532 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.205607 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.205704 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:38Z","lastTransitionTime":"2026-02-02T10:54:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.308440 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.308479 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.308491 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.308507 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.308519 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:38Z","lastTransitionTime":"2026-02-02T10:54:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
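Each setters.go:603 line prints the exact Ready condition the kubelet is about to write into the node status. In this stretch lastHeartbeatTime and lastTransitionTime advance together on every sync while the node stays NotReady; on a healthy node only the heartbeat moves. A stdlib-only mirror of the logged JSON shape (this is an illustration, not the k8s.io/api type):

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// NodeCondition mirrors the JSON shape logged by setters.go:603.
type NodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	now := time.Now().UTC().Format(time.RFC3339)
	c := NodeCondition{
		Type:               "Ready",
		Status:             "False",
		LastHeartbeatTime:  now,
		LastTransitionTime: now,
		Reason:             "KubeletNotReady",
		Message:            "container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?",
	}
	b, _ := json.Marshal(c)
	fmt.Println(string(b)) // same field order and shape as the log entries above
}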
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.410266 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.410302 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.410312 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.410328 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.410338 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:38Z","lastTransitionTime":"2026-02-02T10:54:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.484323 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 13:48:11.427486922 +0000 UTC
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.505863 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:54:38 crc kubenswrapper[4838]: E0202 10:54:38.505982 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.511733 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.511782 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.511797 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.511814 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.511827 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:38Z","lastTransitionTime":"2026-02-02T10:54:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.614370 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.614431 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.614448 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.614471 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.614489 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:38Z","lastTransitionTime":"2026-02-02T10:54:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.717500 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.717733 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.717745 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.717759 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.717793 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:38Z","lastTransitionTime":"2026-02-02T10:54:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.820710 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.821065 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.821224 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.821449 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.821585 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:38Z","lastTransitionTime":"2026-02-02T10:54:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
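The two certificate_manager.go:356 lines in this stretch (at 10:54:37.484036 and 10:54:38.484323) report the same kubelet-serving certificate expiry, 2026-02-24 05:53:03 UTC, yet rotation deadlines three days apart. That is deliberate jitter: the manager picks the rotation point at a randomized fraction of the certificate's validity window and re-rolls it on each pass, so a fleet of kubelets does not rotate in lockstep. A sketch of that computation (the 70-90% window and the notBefore date are assumptions for illustration):

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a random point late in the certificate's validity
// window. The [0.7, 0.9) fraction is an illustrative assumption, not a
// quoted kubelet constant.
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	lifetime := notAfter.Sub(notBefore)
	frac := 0.7 + 0.2*rand.Float64()
	return notBefore.Add(time.Duration(float64(lifetime) * frac))
}

func main() {
	// The log only shows the expiry; a one-year validity is assumed here.
	notAfter := time.Date(2026, time.February, 24, 5, 53, 3, 0, time.UTC)
	notBefore := notAfter.AddDate(-1, 0, 0)
	// Two successive calls land on different deadlines, as in the log.
	fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter))
	fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter))
}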
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.923715 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.923766 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.923781 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.923797 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:38 crc kubenswrapper[4838]: I0202 10:54:38.923810 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:38Z","lastTransitionTime":"2026-02-02T10:54:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.006204 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ndxhv_ddc2e893-5801-4e73-a5f6-9cc52f733f49/kube-multus/0.log"
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.006260 4838 generic.go:334] "Generic (PLEG): container finished" podID="ddc2e893-5801-4e73-a5f6-9cc52f733f49" containerID="479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108" exitCode=1
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.006292 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-ndxhv" event={"ID":"ddc2e893-5801-4e73-a5f6-9cc52f733f49","Type":"ContainerDied","Data":"479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108"}
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.006693 4838 scope.go:117] "RemoveContainer" containerID="479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108"
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.027688 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:39Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.028120 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.028151 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.028162 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.028177 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.028187 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:39Z","lastTransitionTime":"2026-02-02T10:54:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.049345 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:39Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.062897 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:39Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.077827 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:39Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.090737 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:39Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.104862 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:39Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.118089 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:39Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.129337 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:39Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.130595 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.130777 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.130874 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.131028 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.131149 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:39Z","lastTransitionTime":"2026-02-02T10:54:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.145033 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b6b7df7a695cbe85f6c056fdaf425c3d965d0dcd612c0a98b05b9386d42294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379ec
a9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:39Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.156997 4838 
status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-02-02T10:54:39Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.174693 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d27d5c95df96b16d529337161ad976e8d55208625b2dd109e127944108db0bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d27d5c95df96b16d529337161ad976e8d55208625b2dd109e127944108db0bc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:17Z\\\",\\\"message\\\":\\\"ving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0077f672b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:machine-mtrc,Protocol:TCP,Port:8441,TargetPort:{1 0 machine-mtrc},NodePort:0,AppProtocol:nil,},ServicePort{Name:machineset-mtrc,Protocol:TCP,Port:8442,TargetPort:{1 0 machineset-mtrc},NodePort:0,AppProtocol:nil,},ServicePort{Name:mhc-mtrc,Protocol:TCP,Port:8444,TargetPort:{1 0 mhc-mtrc},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{k8s-app: controller,},ClusterIP:10.217.4.167,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.167],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0202 10:54:17.353894 6508 services_controller.go:451] Built service openshift-machine-api/machine-api-operator-webhook cluster-wide L\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:54:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-66l9c_openshift-ovn-kubernetes(9bc00b9c-6e31-4f8e-b4ba-44150281ed69)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:39Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.185797 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:39Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.196227 4838 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05f6144-f55e-4e08-9104-3730000613bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bae30f81dbb217dd45987bdd5ba01a01d13b8c602153c734f27336412c5a397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9265e112e7858143bc9067c7d9b1d00cab82fc64a1e306c175d620d699c94a36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ee92ab7d9ad96ff606c9e549e3d99a3602fd91c10a4ec7ebeb07932825d521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containe
rID\\\":\\\"cri-o://51090d7e9383392f3daab78599cfd0aa3871d6d89d39dcf4c45af7441328b672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51090d7e9383392f3daab78599cfd0aa3871d6d89d39dcf4c45af7441328b672\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:39Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.205680 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Runn
ing\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:39Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.216274 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kdnnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kdnnp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:39Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.232608 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:38Z\\\",\\\"message\\\":\\\"2026-02-02T10:53:53+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1acfd97f-ea49-4859-b551-26ee03577822\\\\n2026-02-02T10:53:53+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1acfd97f-ea49-4859-b551-26ee03577822 to /host/opt/cni/bin/\\\\n2026-02-02T10:53:53Z [verbose] multus-daemon started\\\\n2026-02-02T10:53:53Z [verbose] Readiness Indicator file check\\\\n2026-02-02T10:54:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:39Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.233805 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.233864 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.233874 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.233894 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.233904 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:39Z","lastTransitionTime":"2026-02-02T10:54:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.246388 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfa832ec-d3ca-4c0d-bef6-863867a95110\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b6bb0c34e93fb1ae0885c96c1936812c8faf43e78eae2d53f429c322118857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf49d48779cd69118d1a7fedcb7a6fe58fcd9d9dafaf70cdf9d3d61ce95d80a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4rcnf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:39Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.336437 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.336501 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.336511 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.336530 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.336542 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:39Z","lastTransitionTime":"2026-02-02T10:54:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.440389 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.440437 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.440446 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.440463 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.440478 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:39Z","lastTransitionTime":"2026-02-02T10:54:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.484721 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 17:29:52.850645207 +0000 UTC Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.504960 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.504961 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:54:39 crc kubenswrapper[4838]: E0202 10:54:39.505151 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.504979 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:54:39 crc kubenswrapper[4838]: E0202 10:54:39.505214 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba"
Feb 02 10:54:39 crc kubenswrapper[4838]: E0202 10:54:39.505373 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.542986 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.543055 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.543074 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.543100 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.543121 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:39Z","lastTransitionTime":"2026-02-02T10:54:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.645710 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.645781 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.645799 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.645827 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.645848 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:39Z","lastTransitionTime":"2026-02-02T10:54:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.748725 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.748813 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.748832 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.748863 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.748885 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:39Z","lastTransitionTime":"2026-02-02T10:54:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.851125 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.851171 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.851182 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.851198 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.851210 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:39Z","lastTransitionTime":"2026-02-02T10:54:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.953430 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.953494 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.953518 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.953548 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 10:54:39 crc kubenswrapper[4838]: I0202 10:54:39.953568 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:39Z","lastTransitionTime":"2026-02-02T10:54:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.012034 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ndxhv_ddc2e893-5801-4e73-a5f6-9cc52f733f49/kube-multus/0.log"
Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.012246 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-ndxhv" event={"ID":"ddc2e893-5801-4e73-a5f6-9cc52f733f49","Type":"ContainerStarted","Data":"94192e5199deb1455cbc4f14f600e68e7432dc6e1b246d174b7a4cc4b4359770"}
Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.025795 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" err="failed to patch status
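Every NodeNotReady record above reduces to one condition: the container runtime keeps reporting NetworkReady=false until at least one CNI configuration file shows up in /etc/kubernetes/cni/net.d/ (on this cluster, the file that OVN-Kubernetes writes once it is up). A minimal sketch of that kind of directory probe, with illustrative names (networkReady, cniConfDir) rather than the actual CRI-O/ocicni implementation:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// networkReady reports whether any CNI network configuration exists yet
// in cniConfDir. Sketch only: a real runtime also parses and validates
// the files, tracks the default network, and watches the directory.
func networkReady(cniConfDir string) (bool, error) {
	entries, err := os.ReadDir(cniConfDir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // suffixes CNI config loaders accept
			return true, nil
		}
	}
	return false, fmt.Errorf("no CNI configuration file in %s", cniConfDir)
}

func main() {
	ready, err := networkReady("/etc/kubernetes/cni/net.d")
	fmt.Println(ready, err)
}

The bare presence check is the condition the kubelet surfaces verbatim in the Ready condition message above; once the file appears, the node flips back to Ready without any kubelet restart.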
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfa832ec-d3ca-4c0d-bef6-863867a95110\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b6bb0c34e93fb1ae0885c96c1936812c8faf43e78eae2d53f429c322118857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf49d48779cd69118d1a7fedcb7a6fe58fcd9d9dafaf70cdf9d3d61ce95d80a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4rcnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:40Z is after 2025-08-24T17:21:41Z" Feb 02 
10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.036839 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kdnnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kdnnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:40Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.048800 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94192e5199deb1455cbc4f14f600e68e7432dc6e1b246d174b7a4cc4b4359770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:38Z\\\",\\\"message\\\":\\\"2026-02-02T10:53:53+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1acfd97f-ea49-4859-b551-26ee03577822\\\\n2026-02-02T10:53:53+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1acfd97f-ea49-4859-b551-26ee03577822 to /host/opt/cni/bin/\\\\n2026-02-02T10:53:53Z [verbose] multus-daemon started\\\\n2026-02-02T10:53:53Z [verbose] Readiness Indicator file check\\\\n2026-02-02T10:54:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:40Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.060149 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.061446 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.060543 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
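The kube-multus restart recorded in this entry is the readiness-indicator mechanism doing what it is meant to do: the previous container polled for /host/run/multus/cni/net.d/10-ovn-kubernetes.conf, gave up when the poll timed out ("timed out waiting for the condition" is the timeout error text of the apimachinery wait package that the "pollimmediate error" prefix points at), exited with code 1, and the replacement came up at 10:54:39 with restartCount 1. A dependency-free sketch of such a poll loop; the one-second interval and 45-second budget are assumptions for illustration, not multus's actual settings:

package main

import (
	"fmt"
	"os"
	"time"
)

// waitForFile polls until path exists or timeout elapses, checking
// immediately first, the same shape as apimachinery's wait.PollImmediate.
// Interval and timeout are illustrative values only.
func waitForFile(path string, interval, timeout time.Duration) error {
	deadline := time.Now().Add(timeout)
	for {
		if _, err := os.Stat(path); err == nil {
			return nil
		}
		if time.Now().After(deadline) {
			return fmt.Errorf("timed out waiting for the condition")
		}
		time.Sleep(interval)
	}
}

func main() {
	err := waitForFile("/host/run/multus/cni/net.d/10-ovn-kubernetes.conf",
		time.Second, 45*time.Second)
	fmt.Println(err)
}

The exit code 1 in the lastState above is therefore the expected outcome of the timeout, not a crash of the daemon itself.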
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:40Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.061478 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.061718 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.061742 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:40Z","lastTransitionTime":"2026-02-02T10:54:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.075940 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:40Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.086705 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:40Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.100540 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:40Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.110409 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:40Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.124543 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:40Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.138913 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
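Several patch bodies above carry lastState.terminated with reason ContainerStatusUnknown, exit code 137, and the message "The container could not be located when the pod was deleted": the kubelet could not find those containers after coming back, so it records the conventional exit code for a killed container rather than an observed one. The convention is 128 plus the signal number, so SIGKILL yields 137:

package main

import (
	"fmt"
	"syscall"
)

func main() {
	// Conventional exit code for a SIGKILLed (or, as in these entries,
	// unlocatable) container: 128 + signal number, i.e. 128 + 9.
	fmt.Println(128 + int(syscall.SIGKILL)) // prints 137
}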
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05f6144-f55e-4e08-9104-3730000613bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bae30f81dbb217dd45987bdd5ba01a01d13b8c602153c734f27336412c5a397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9265e112e7858143bc9067c7d9b1d00cab82fc64a1e306c175d620d699c94a36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ee92ab7d9ad96ff606c9e549e3d99a3602fd91c10a4ec7ebeb07932825d521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51090d7e9383392f3daab78599cfd0aa3871d6d89d39dcf4c45af7441328b672\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51090d7e9383392f3daab78599cfd0aa3871d6d89d39dcf4c45af7441328b672\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:40Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.152193 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:40Z is after 2025-08-24T17:21:41Z" Feb 02 
10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.162796 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:40Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.164684 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.164754 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.164767 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.164829 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.164848 4838 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:40Z","lastTransitionTime":"2026-02-02T10:54:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.218225 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b6b7df7a695cbe85f6c056fdaf425c3d965d0dcd612c0a98b05b9386d42294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"co
ntainerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:40Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.234440 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:40Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.250587 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d27d5c95df96b16d529337161ad976e8d55208625b2dd109e127944108db0bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d27d5c95df96b16d529337161ad976e8d55208625b2dd109e127944108db0bc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:17Z\\\",\\\"message\\\":\\\"ving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0077f672b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:machine-mtrc,Protocol:TCP,Port:8441,TargetPort:{1 0 machine-mtrc},NodePort:0,AppProtocol:nil,},ServicePort{Name:machineset-mtrc,Protocol:TCP,Port:8442,TargetPort:{1 0 machineset-mtrc},NodePort:0,AppProtocol:nil,},ServicePort{Name:mhc-mtrc,Protocol:TCP,Port:8444,TargetPort:{1 0 mhc-mtrc},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{k8s-app: controller,},ClusterIP:10.217.4.167,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.167],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0202 10:54:17.353894 6508 services_controller.go:451] Built service openshift-machine-api/machine-api-operator-webhook cluster-wide 
L\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:54:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-66l9c_openshift-ovn-kubernetes(9bc00b9c-6e31-4f8e-b4ba-44150281ed69)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveRe
adOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:40Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.261778 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:40Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.267222 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.267272 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.267289 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.267312 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.267328 4838 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:40Z","lastTransitionTime":"2026-02-02T10:54:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.272508 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:40Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.369731 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.369797 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.369821 4838 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.369853 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.369872 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:40Z","lastTransitionTime":"2026-02-02T10:54:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.475928 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.475990 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.476024 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.476054 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.476075 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:40Z","lastTransitionTime":"2026-02-02T10:54:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.485281 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 11:01:24.593275284 +0000 UTC Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.505136 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:54:40 crc kubenswrapper[4838]: E0202 10:54:40.505345 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.578684 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.578733 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.578745 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.578763 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.578775 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:40Z","lastTransitionTime":"2026-02-02T10:54:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.681085 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.681396 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.681535 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.681809 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.681854 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:40Z","lastTransitionTime":"2026-02-02T10:54:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.784173 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.784220 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.784232 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.784248 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.784261 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:40Z","lastTransitionTime":"2026-02-02T10:54:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.886881 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.886938 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.886955 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.886978 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.886996 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:40Z","lastTransitionTime":"2026-02-02T10:54:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.990277 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.990338 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.990356 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.990382 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:40 crc kubenswrapper[4838]: I0202 10:54:40.990400 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:40Z","lastTransitionTime":"2026-02-02T10:54:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.093088 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.093131 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.093142 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.093158 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.093170 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:41Z","lastTransitionTime":"2026-02-02T10:54:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.196192 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.196244 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.196256 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.196273 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.196288 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:41Z","lastTransitionTime":"2026-02-02T10:54:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.299784 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.299860 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.299884 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.299909 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.299932 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:41Z","lastTransitionTime":"2026-02-02T10:54:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.403097 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.403140 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.403152 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.403173 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.403190 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:41Z","lastTransitionTime":"2026-02-02T10:54:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.486143 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 06:01:46.442237067 +0000 UTC Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.504864 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.504910 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:54:41 crc kubenswrapper[4838]: E0202 10:54:41.504979 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.505128 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:41 crc kubenswrapper[4838]: E0202 10:54:41.505268 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:54:41 crc kubenswrapper[4838]: E0202 10:54:41.505346 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.506561 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.506656 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.506682 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.506710 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.506734 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:41Z","lastTransitionTime":"2026-02-02T10:54:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.609408 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.609468 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.609486 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.609511 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.609531 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:41Z","lastTransitionTime":"2026-02-02T10:54:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.711686 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.711746 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.711766 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.711794 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.711814 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:41Z","lastTransitionTime":"2026-02-02T10:54:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.814892 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.814924 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.814932 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.814945 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.814954 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:41Z","lastTransitionTime":"2026-02-02T10:54:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.917541 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.917601 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.917632 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.917649 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:41 crc kubenswrapper[4838]: I0202 10:54:41.917661 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:41Z","lastTransitionTime":"2026-02-02T10:54:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.019598 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.019731 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.019756 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.019787 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.019811 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:42Z","lastTransitionTime":"2026-02-02T10:54:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.121661 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.121691 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.121699 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.121711 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.121719 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:42Z","lastTransitionTime":"2026-02-02T10:54:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.224178 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.224250 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.224271 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.224298 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.224316 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:42Z","lastTransitionTime":"2026-02-02T10:54:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.326162 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.326229 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.326247 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.326271 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.326291 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:42Z","lastTransitionTime":"2026-02-02T10:54:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.429133 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.429178 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.429189 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.429206 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.429219 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:42Z","lastTransitionTime":"2026-02-02T10:54:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.487209 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 10:18:39.053528278 +0000 UTC Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.505730 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:54:42 crc kubenswrapper[4838]: E0202 10:54:42.505923 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.531461 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.531497 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.531508 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.531523 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.531535 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:42Z","lastTransitionTime":"2026-02-02T10:54:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.634585 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.634688 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.634705 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.634729 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.634751 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:42Z","lastTransitionTime":"2026-02-02T10:54:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.738452 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.738511 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.738530 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.738555 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.738573 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:42Z","lastTransitionTime":"2026-02-02T10:54:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.840778 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.840892 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.840917 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.840950 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.840971 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:42Z","lastTransitionTime":"2026-02-02T10:54:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.944129 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.944210 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.944234 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.944264 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:42 crc kubenswrapper[4838]: I0202 10:54:42.944286 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:42Z","lastTransitionTime":"2026-02-02T10:54:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.046418 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.046494 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.046507 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.046522 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.046535 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:43Z","lastTransitionTime":"2026-02-02T10:54:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.149654 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.149716 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.149733 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.149756 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.149775 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:43Z","lastTransitionTime":"2026-02-02T10:54:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.252335 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.252377 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.252387 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.252403 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.252414 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:43Z","lastTransitionTime":"2026-02-02T10:54:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.355239 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.355288 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.355302 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.355322 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.355338 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:43Z","lastTransitionTime":"2026-02-02T10:54:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.457759 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.457838 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.457863 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.457893 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.457939 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:43Z","lastTransitionTime":"2026-02-02T10:54:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.487542 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 23:01:19.60904239 +0000 UTC Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.504872 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:54:43 crc kubenswrapper[4838]: E0202 10:54:43.505067 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.505116 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.505142 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:43 crc kubenswrapper[4838]: E0202 10:54:43.505532 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:54:43 crc kubenswrapper[4838]: E0202 10:54:43.505730 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.520239 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.560783 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.560863 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.560890 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.560922 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.560947 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:43Z","lastTransitionTime":"2026-02-02T10:54:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.663785 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.663844 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.663862 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.663888 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.663908 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:43Z","lastTransitionTime":"2026-02-02T10:54:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.772330 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.772393 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.772410 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.772434 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.772454 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:43Z","lastTransitionTime":"2026-02-02T10:54:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.875593 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.875637 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.875649 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.875669 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.875680 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:43Z","lastTransitionTime":"2026-02-02T10:54:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.979060 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.979171 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.979191 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.979214 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:43 crc kubenswrapper[4838]: I0202 10:54:43.979231 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:43Z","lastTransitionTime":"2026-02-02T10:54:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.081324 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.081363 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.081371 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.081385 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.081394 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:44Z","lastTransitionTime":"2026-02-02T10:54:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.183525 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.183558 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.183566 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.183577 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.183586 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:44Z","lastTransitionTime":"2026-02-02T10:54:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.286225 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.286285 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.286302 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.286329 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.286346 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:44Z","lastTransitionTime":"2026-02-02T10:54:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.333591 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.333654 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.333667 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.333682 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.333690 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:44Z","lastTransitionTime":"2026-02-02T10:54:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:44 crc kubenswrapper[4838]: E0202 10:54:44.349610 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:44Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.353878 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.353927 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.353941 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.353957 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.353970 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:44Z","lastTransitionTime":"2026-02-02T10:54:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:44 crc kubenswrapper[4838]: E0202 10:54:44.371757 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:44Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.375847 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.375903 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.375920 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.375943 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.375959 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:44Z","lastTransitionTime":"2026-02-02T10:54:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:44 crc kubenswrapper[4838]: E0202 10:54:44.395318 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:44Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.400059 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.400109 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.400121 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.400139 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.400152 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:44Z","lastTransitionTime":"2026-02-02T10:54:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:44 crc kubenswrapper[4838]: E0202 10:54:44.417321 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:44Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.422818 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.422881 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.422905 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.422939 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.422960 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:44Z","lastTransitionTime":"2026-02-02T10:54:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:44 crc kubenswrapper[4838]: E0202 10:54:44.443612 4838 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6d1a8705-67dc-4d3e-99d4-016e519e43da\\\",\\\"systemUUID\\\":\\\"33893a6d-b6bd-46d3-8543-3002098168f9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:44Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:44 crc kubenswrapper[4838]: E0202 10:54:44.444252 4838 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.446679 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.446912 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.447137 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.447368 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.447579 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:44Z","lastTransitionTime":"2026-02-02T10:54:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.488155 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 00:50:16.472942544 +0000 UTC Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.505868 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:54:44 crc kubenswrapper[4838]: E0202 10:54:44.506026 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.507179 4838 scope.go:117] "RemoveContainer" containerID="7d27d5c95df96b16d529337161ad976e8d55208625b2dd109e127944108db0bc" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.554989 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.555046 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.555076 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.555107 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.555124 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:44Z","lastTransitionTime":"2026-02-02T10:54:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.657345 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.657372 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.657380 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.657394 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.657405 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:44Z","lastTransitionTime":"2026-02-02T10:54:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.759980 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.760036 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.760053 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.760075 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.760093 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:44Z","lastTransitionTime":"2026-02-02T10:54:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.862796 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.862870 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.862889 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.862914 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.862931 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:44Z","lastTransitionTime":"2026-02-02T10:54:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.966023 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.966088 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.966099 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.966122 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:44 crc kubenswrapper[4838]: I0202 10:54:44.966136 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:44Z","lastTransitionTime":"2026-02-02T10:54:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.033167 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-66l9c_9bc00b9c-6e31-4f8e-b4ba-44150281ed69/ovnkube-controller/2.log" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.037044 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerStarted","Data":"f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588"} Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.037889 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.064419 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:45Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.069070 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.069139 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.069162 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.069188 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.069205 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:45Z","lastTransitionTime":"2026-02-02T10:54:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.089521 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:45Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.141854 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:45Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.163091 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:45Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.171559 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.171634 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.171647 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.171668 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.171682 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:45Z","lastTransitionTime":"2026-02-02T10:54:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.183160 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:45Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.215015 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9216721-0d86-424e-9c4f-eed11b09e7b7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://385e32753a69517e12bf4bd0ceb5bbb13e6cd79cf0f7df5ef20dfd36f8f4bac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af5d50ad3a14334ecb762074791d8b64727d86e69438375b3067b12d4588d93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af5d50ad3a14334ecb762074791d8b64727d86e69438375b3067b12d4588d93b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:45Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.237480 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:45Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.255159 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:45Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.265505 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:45Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.274177 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.274228 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.274244 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.274267 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.274284 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:45Z","lastTransitionTime":"2026-02-02T10:54:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.281365 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b6b7df7a695cbe85f6c056fdaf425c3d965d0dcd612c0a98b05b9386d42294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379ec
a9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:45Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.292700 4838 
status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-02-02T10:54:45Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.312413 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d27d5c95df96b16d529337161ad976e8d55208625b2dd109e127944108db0bc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:17Z\\\",\\\"message\\\":\\\"ving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0077f672b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:machine-mtrc,Protocol:TCP,Port:8441,TargetPort:{1 0 machine-mtrc},NodePort:0,AppProtocol:nil,},ServicePort{Name:machineset-mtrc,Protocol:TCP,Port:8442,TargetPort:{1 0 machineset-mtrc},NodePort:0,AppProtocol:nil,},ServicePort{Name:mhc-mtrc,Protocol:TCP,Port:8444,TargetPort:{1 0 mhc-mtrc},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{k8s-app: controller,},ClusterIP:10.217.4.167,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.167],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0202 10:54:17.353894 6508 services_controller.go:451] Built service openshift-machine-api/machine-api-operator-webhook cluster-wide 
L\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:54:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:45Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.335264 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:45Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.352950 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05f6144-f55e-4e08-9104-3730000613bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bae30f81dbb217dd45987bdd5ba01a01d13b8c602153c734f27336412c5a397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9265e112e7858143bc9067c7d9b1d00cab82fc64a1e306c175d620d699c94a36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ee92ab7d9ad96ff606c9e549e3d99a3602fd91c10a4ec7ebeb07932825d521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51090d7e9383392f3daab78599cfd0aa3871d6d89d39dcf4c45af7441328b672\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51090d7e9383392f3daab78599cfd0aa3871d6d89d39dcf4c45af7441328b672\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:45Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.367651 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}]
,\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:45Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.377812 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.377855 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.377870 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.377890 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.377903 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:45Z","lastTransitionTime":"2026-02-02T10:54:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.385403 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94192e5199deb1455cbc4f14f600e68e7432dc6e1b246d174b7a4cc4b4359770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:38Z\\\",\\\"message\\\":\\\"2026-02-02T10:53:53+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to 
/host/opt/cni/bin/upgrade_1acfd97f-ea49-4859-b551-26ee03577822\\\\n2026-02-02T10:53:53+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1acfd97f-ea49-4859-b551-26ee03577822 to /host/opt/cni/bin/\\\\n2026-02-02T10:53:53Z [verbose] multus-daemon started\\\\n2026-02-02T10:53:53Z [verbose] Readiness Indicator file check\\\\n2026-02-02T10:54:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:45Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.397420 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfa832ec-d3ca-4c0d-bef6-863867a95110\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b6bb0c34e93fb1ae0885c96c1936812c8faf43e78eae2d53f429c322118857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf49d48779cd69118d1a7fedcb7a6fe58fcd9d9dafaf70cdf9d3d61ce95d80a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4rcnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:45Z is after 2025-08-24T17:21:41Z" Feb 02 
10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.410438 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kdnnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kdnnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:45Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.480808 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.480893 4838 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.480912 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.480944 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.480964 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:45Z","lastTransitionTime":"2026-02-02T10:54:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.489276 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 21:20:24.819523998 +0000 UTC Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.504838 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.504920 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.504854 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:54:45 crc kubenswrapper[4838]: E0202 10:54:45.505021 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:54:45 crc kubenswrapper[4838]: E0202 10:54:45.505123 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba" Feb 02 10:54:45 crc kubenswrapper[4838]: E0202 10:54:45.505259 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.583905 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.583966 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.583983 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.584006 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.584024 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:45Z","lastTransitionTime":"2026-02-02T10:54:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.687579 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.687671 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.687689 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.687713 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.687730 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:45Z","lastTransitionTime":"2026-02-02T10:54:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.790915 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.790981 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.790999 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.791024 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.791042 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:45Z","lastTransitionTime":"2026-02-02T10:54:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.894793 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.894846 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.894864 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.894888 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.894907 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:45Z","lastTransitionTime":"2026-02-02T10:54:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.998851 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.998915 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.998933 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.998959 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:45 crc kubenswrapper[4838]: I0202 10:54:45.998978 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:45Z","lastTransitionTime":"2026-02-02T10:54:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.043387 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-66l9c_9bc00b9c-6e31-4f8e-b4ba-44150281ed69/ovnkube-controller/3.log" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.044463 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-66l9c_9bc00b9c-6e31-4f8e-b4ba-44150281ed69/ovnkube-controller/2.log" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.049783 4838 generic.go:334] "Generic (PLEG): container finished" podID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerID="f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588" exitCode=1 Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.049842 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerDied","Data":"f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588"} Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.049893 4838 scope.go:117] "RemoveContainer" containerID="7d27d5c95df96b16d529337161ad976e8d55208625b2dd109e127944108db0bc" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.051096 4838 scope.go:117] "RemoveContainer" containerID="f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588" Feb 02 10:54:46 crc kubenswrapper[4838]: E0202 10:54:46.051412 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-66l9c_openshift-ovn-kubernetes(9bc00b9c-6e31-4f8e-b4ba-44150281ed69)\"" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.068352 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05f6144-f55e-4e08-9104-3730000613bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bae30f81dbb217dd45987bdd5ba01a01d13b8c602153c734f27336412c5a397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9265e112e7858143bc9067c7d9b1d00cab82fc64a1e306c175d620d699c94a36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ee92ab7d9ad96ff606c9e549e3d99a3602fd91c10a4ec7ebeb07932825d521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51090d7e9383392f3daab78599cfd0aa3871d6d89d39dcf4c45af7441328b672\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51090d7e9383392f3daab78599cfd0aa3871d6d89d39dcf4c45af7441328b672\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.102183 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.102251 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.102268 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.102295 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.102314 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:46Z","lastTransitionTime":"2026-02-02T10:54:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.106792 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.123768 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.149801 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b6b7df7a695cbe85f6c056fdaf425c3d965d0dcd612c0a98b05b9386d42294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.170667 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.205740 4838 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d27d5c95df96b16d529337161ad976e8d55208625b2dd109e127944108db0bc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:17Z\\\",\\\"message\\\":\\\"ving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0077f672b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:machine-mtrc,Protocol:TCP,Port:8441,TargetPort:{1 0 machine-mtrc},NodePort:0,AppProtocol:nil,},ServicePort{Name:machineset-mtrc,Protocol:TCP,Port:8442,TargetPort:{1 0 machineset-mtrc},NodePort:0,AppProtocol:nil,},ServicePort{Name:mhc-mtrc,Protocol:TCP,Port:8444,TargetPort:{1 0 mhc-mtrc},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{k8s-app: controller,},ClusterIP:10.217.4.167,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.167],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0202 10:54:17.353894 6508 services_controller.go:451] Built service openshift-machine-api/machine-api-operator-webhook cluster-wide L\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:54:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:45Z\\\",\\\"message\\\":\\\"54:45.722928 6906 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 10:54:45.723004 6906 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0202 10:54:45.723100 6906 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0202 10:54:45.723362 6906 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:45.723541 6906 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:45.723806 6906 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:45.723936 6906 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:45.723967 6906 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:45.724271 6906 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha25
6:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.206604 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.206709 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.206730 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.206760 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.206801 4838 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:46Z","lastTransitionTime":"2026-02-02T10:54:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.230288 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\
":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.245466 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.264259 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfa832ec-d3ca-4c0d-bef6-863867a95110\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b6bb0c34e93fb1ae0885c96c1936812c8faf43e78eae2d53f429c322118857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf49d48779cd69118d1a7fedcb7a6fe58fcd9d9dafaf70cdf9d3d61ce95d80a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4rcnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 
10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.282034 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kdnnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kdnnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.303499 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94192e5199deb1455cbc4f14f600e68e7432dc6e1b246d174b7a4cc4b4359770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:38Z\\\",\\\"message\\\":\\\"2026-02-02T10:53:53+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1acfd97f-ea49-4859-b551-26ee03577822\\\\n2026-02-02T10:53:53+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1acfd97f-ea49-4859-b551-26ee03577822 to /host/opt/cni/bin/\\\\n2026-02-02T10:53:53Z [verbose] multus-daemon started\\\\n2026-02-02T10:53:53Z [verbose] Readiness Indicator file check\\\\n2026-02-02T10:54:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.310009 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.310069 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.310094 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.310123 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.310143 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:46Z","lastTransitionTime":"2026-02-02T10:54:46Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.325085 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.343410 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9216721-0d86-424e-9c4f-eed11b09e7b7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://385e32753a69517e12bf4bd0ceb5bbb13e6cd79cf0f7df5ef20dfd36f8f4bac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af5d50ad3a14334ecb762074791d8b64727d86e69438375b3067b12d4588d93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af5d50ad3a14334ecb762074791d8b64727d86e69438375b3067b12d4588d93b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.364823 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.385226 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.408739 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.413699 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.413755 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.413772 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.413795 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.413812 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:46Z","lastTransitionTime":"2026-02-02T10:54:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.430700 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.449793 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.490201 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 11:35:29.716011407 +0000 UTC Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.505066 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:54:46 crc kubenswrapper[4838]: E0202 10:54:46.505260 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.516873 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.516921 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.516937 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.516961 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.516978 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:46Z","lastTransitionTime":"2026-02-02T10:54:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.527248 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.548674 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.568986 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.586864 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9216721-0d86-424e-9c4f-eed11b09e7b7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://385e32753a69517e12bf4bd0ceb5bbb13e6cd79cf0f7df5ef20dfd36f8f4bac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af5d50ad3a14334ecb762074791d8b64727d86e69438375b3067b12d4588d93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af5d50ad3a14334ecb762074791d8b64727d86e69438375b3067b12d4588d93b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.610417 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.619877 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.619926 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.619944 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.619968 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.619986 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:46Z","lastTransitionTime":"2026-02-02T10:54:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.630840 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.655515 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.684990 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b6b7df7a695cbe85f6c056fdaf425c3d965d0dcd612c0a98b05b9386d42294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.705951 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.722840 4838 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.722898 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.722922 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.722952 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.722973 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:46Z","lastTransitionTime":"2026-02-02T10:54:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.743714 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f66f8d11b13d956aceba18a634e6b51c0c58339e
196b017ef4b513805af7d588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d27d5c95df96b16d529337161ad976e8d55208625b2dd109e127944108db0bc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:17Z\\\",\\\"message\\\":\\\"ving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0077f672b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:machine-mtrc,Protocol:TCP,Port:8441,TargetPort:{1 0 machine-mtrc},NodePort:0,AppProtocol:nil,},ServicePort{Name:machineset-mtrc,Protocol:TCP,Port:8442,TargetPort:{1 0 machineset-mtrc},NodePort:0,AppProtocol:nil,},ServicePort{Name:mhc-mtrc,Protocol:TCP,Port:8444,TargetPort:{1 0 mhc-mtrc},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{k8s-app: controller,},ClusterIP:10.217.4.167,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.167],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0202 10:54:17.353894 6508 services_controller.go:451] Built service openshift-machine-api/machine-api-operator-webhook cluster-wide L\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:54:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:45Z\\\",\\\"message\\\":\\\"54:45.722928 6906 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 10:54:45.723004 6906 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0202 10:54:45.723100 6906 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0202 10:54:45.723362 6906 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:45.723541 6906 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:45.723806 6906 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:45.723936 6906 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:45.723967 6906 reflector.go:311] Stopping reflector *v1.Node (0s) from 
k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:45.724271 6906 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c
d735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.766666 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.783341 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05f6144-f55e-4e08-9104-3730000613bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bae30f81dbb217dd45987bdd5ba01a01d13b8c602153c734f27336412c5a397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9265e112e7858143bc9067c7d9b1d00cab82fc64a1e306c175d620d699c94a36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ee92ab7d9ad96ff606c9e549e3d99a3602fd91c10a4ec7ebeb07932825d521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51090d7e9383392f3daab78599cfd0aa3871d6d89d39dcf4c45af7441328b672\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51090d7e9383392f3daab78599cfd0aa3871d6d89d39dcf4c45af7441328b672\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.808733 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 
10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.826095 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.826224 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.826246 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.826308 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.826328 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:46Z","lastTransitionTime":"2026-02-02T10:54:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.826498 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.846661 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.867949 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94192e5199deb1455cbc4f14f600e68e7432dc6e1b246d174b7a4cc4b4359770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:38Z\\\",\\\"message\\\":\\\"2026-02-02T10:53:53+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1acfd97f-ea49-4859-b551-26ee03577822\\\\n2026-02-02T10:53:53+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1acfd97f-ea49-4859-b551-26ee03577822 to /host/opt/cni/bin/\\\\n2026-02-02T10:53:53Z [verbose] multus-daemon started\\\\n2026-02-02T10:53:53Z [verbose] Readiness Indicator file check\\\\n2026-02-02T10:54:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.885821 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfa832ec-d3ca-4c0d-bef6-863867a95110\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b6bb0c34e93fb1ae0885c96c1936812c8faf43e78eae2d53f429c322118857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf49d48779cd69118d1a7fedcb7a6fe58fcd9d9dafaf70cdf9d3d61ce95d80a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4rcnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 
10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.903577 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kdnnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kdnnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:46Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.929304 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.929374 4838 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.929386 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.929404 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:46 crc kubenswrapper[4838]: I0202 10:54:46.929420 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:46Z","lastTransitionTime":"2026-02-02T10:54:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.032887 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.032938 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.032954 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.032976 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.032993 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:47Z","lastTransitionTime":"2026-02-02T10:54:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.056071 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-66l9c_9bc00b9c-6e31-4f8e-b4ba-44150281ed69/ovnkube-controller/3.log" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.063479 4838 scope.go:117] "RemoveContainer" containerID="f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588" Feb 02 10:54:47 crc kubenswrapper[4838]: E0202 10:54:47.063775 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-66l9c_openshift-ovn-kubernetes(9bc00b9c-6e31-4f8e-b4ba-44150281ed69)\"" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.084330 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ndxhv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ddc2e893-5801-4e73-a5f6-9cc52f733f49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94192e5199deb1455cbc4f14f600e68e7432dc6e1b246d174b7a4cc4b4359770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:38Z\\\",\\\"message\\\":\\\"2026-02-02T10:53:53+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1acfd97f-ea49-4859-b551-26ee03577822\\\\n2026-02-02T10:53:53+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1acfd97f-ea49-4859-b551-26ee03577822 to /host/opt/cni/bin/\\\\n2026-02-02T10:53:53Z [verbose] multus-daemon started\\\\n2026-02-02T10:53:53Z [verbose] Readiness Indicator file check\\\\n2026-02-02T10:54:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5tvlz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ndxhv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:47Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.104561 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dfa832ec-d3ca-4c0d-bef6-863867a95110\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b6bb0c34e93fb1ae0885c96c1936812c8faf43e78eae2d53f429c322118857b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf49d48779cd69118d1a7fedcb7a6fe58fcd9d9dafaf70cdf9d3d61ce95d80a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:54:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mz6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4rcnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:47Z is after 2025-08-24T17:21:41Z" Feb 02 
10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.121158 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kdnnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:54:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kdnnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:47Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.135845 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.135900 4838 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.135908 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.135922 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.135949 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:47Z","lastTransitionTime":"2026-02-02T10:54:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.143182 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:47Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.161549 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68dc50fc-d558-4afd-86e0-8abb132d94eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a251e3b47cbbe812a1cd7a6b87d63fa637b9410b867be9285fdfbd892f5e6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cbae7ecd46edfc6d00a00b7ee219298e00378388d67ca9c9d09581efda3cdb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15a067e0b5f84fd62c6d0b81d6c7cfe429bf99a76a4f65a6c4d466f9b68d6006\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:47Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.178906 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9216721-0d86-424e-9c4f-eed11b09e7b7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://385e32753a69517e12bf4bd0ceb5bbb13e6cd79cf0f7df5ef20dfd36f8f4bac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af5d50ad3a14334ecb762074791d8b64727d86e69438375b3067b12d4588d93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af5d50ad3a14334ecb762074791d8b64727d86e69438375b3067b12d4588d93b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:47Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.199550 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7227e2b1e133824b9ad04a95d7820a33bf7d14121f55321da3fd532616ba360d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:47Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.221439 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:47Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.237441 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5396cc25f9a9ab835a02e28091bfe8e539848595b112224a650f7e2cc550e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91aaa16398a981aaacd400addc3112a194599475b42391590fda4c262c634f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:47Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.239711 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.239860 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.239884 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.239909 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.239927 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:47Z","lastTransitionTime":"2026-02-02T10:54:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.256010 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:47Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:47Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.276831 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe56663-b047-48b0-864b-53bd2a18f1be\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T10:53:46Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0202 10:53:40.647398 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 10:53:40.649245 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4266224476/tls.crt::/tmp/serving-cert-4266224476/tls.key\\\\\\\"\\\\nI0202 10:53:46.084471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 10:53:46.087359 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 10:53:46.087385 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 10:53:46.087412 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 10:53:46.087419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 10:53:46.095368 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 10:53:46.095395 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095402 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 10:53:46.095408 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 10:53:46.095413 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 10:53:46.095417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 10:53:46.095421 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0202 10:53:46.095470 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0202 10:53:46.098528 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:47Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.294000 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05f6144-f55e-4e08-9104-3730000613bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:54:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bae30f81dbb217dd45987bdd5ba01a01d13b8c602153c734f27336412c5a397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9265e112e7858143bc9067c7d9b1d00cab82fc64a1e306c175d620d699c94a36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://74ee92ab7d9ad96ff606c9e549e3d99a3602fd91c10a4ec7ebeb07932825d521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51090d7e9383392f3daab78599cfd0aa3871d6d89d39dcf4c45af7441328b672\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51090d7e9383392f3daab78599cfd0aa3871d6d89d39dcf4c45af7441328b672\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:47Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.312867 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4377aaf2ab59b1596d8eeb37cb64f66bf337eb159084e12b03b9d75de7b3c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:47Z is after 2025-08-24T17:21:41Z" Feb 02 
10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.330033 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mz9jt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89c31cbf-ff09-4ee1-91eb-0ce82d805dd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62b03b91aa1625e683ddfaa443b4ae6950ee90b580f8dd5572bd2ae34224b209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfsfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mz9jt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:47Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.343347 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.343412 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.343424 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.343442 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.343456 4838 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:47Z","lastTransitionTime":"2026-02-02T10:54:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.354282 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"464d6539-c3a0-4529-b9a7-45211255c1dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b6b7df7a695cbe85f6c056fdaf425c3d965d0dcd612c0a98b05b9386d42294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c0d20defc96e92df09d96638fa19d676ff911e9e3dec031be99afb3ba6ed971\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5d379eca9d81f90d4464280677b22321d1c3048a0ff4cdbe95f52063b4d7184\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b0907dd1a5907e8713bc7c7a5b298ebccda6c0a15f3c2d1b0849a7e64746274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"co
ntainerID\\\":\\\"cri-o://27382df9ae108fca66f0c349474fae7f4ff9e952eb35305c8e6985eca457aee0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a78da289763e92ff68e65fbfe417a5a51adb75c62734ade34f502f2b3a61ec9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb07ab2737219e2664a8841c75593cfdb29b45f205826f9013bc5cf3acc77eee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4j6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xrkv9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:47Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.372050 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef550b82e481258d88c22fa73ff31dc069240c7e7c2f77caead78702a6957d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jmt8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:50Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-n7ctv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:47Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.401259 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T10:54:45Z\\\",\\\"message\\\":\\\"54:45.722928 6906 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 10:54:45.723004 6906 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0202 10:54:45.723100 6906 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0202 10:54:45.723362 6906 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:45.723541 6906 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:45.723806 6906 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:45.723936 6906 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:45.723967 6906 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0202 10:54:45.724271 6906 reflector.go:311] Stopping reflector *v1.Service (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T10:54:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-66l9c_openshift-ovn-kubernetes(9bc00b9c-6e31-4f8e-b4ba-44150281ed69)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOn
ly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T10:53:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T10:53:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7p8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-66l9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:47Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.416695 4838 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ftlpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c555924-1f4c-4168-b9da-61f639e8e50d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T10:53:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29ee405f7e8226e5415daaa44df564a7a33e7327a43224b3fbb228295357ae97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T10:53:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r429m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T10:53:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ftlpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T10:54:47Z is after 2025-08-24T17:21:41Z" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.446409 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.446466 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.446487 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.446515 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.446535 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:47Z","lastTransitionTime":"2026-02-02T10:54:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.491204 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 04:07:31.989200006 +0000 UTC Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.505707 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.505850 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:47 crc kubenswrapper[4838]: E0202 10:54:47.506124 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.506498 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:54:47 crc kubenswrapper[4838]: E0202 10:54:47.506707 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:54:47 crc kubenswrapper[4838]: E0202 10:54:47.506994 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.549082 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.549204 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.549224 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.549245 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.549263 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:47Z","lastTransitionTime":"2026-02-02T10:54:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.652007 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.652068 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.652085 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.652112 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.652131 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:47Z","lastTransitionTime":"2026-02-02T10:54:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.756025 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.756092 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.756109 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.756137 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.756492 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:47Z","lastTransitionTime":"2026-02-02T10:54:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.859878 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.859957 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.859980 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.860004 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.860021 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:47Z","lastTransitionTime":"2026-02-02T10:54:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.962778 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.962837 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.962854 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.962877 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:47 crc kubenswrapper[4838]: I0202 10:54:47.962894 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:47Z","lastTransitionTime":"2026-02-02T10:54:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.065567 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.065672 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.065698 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.065732 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.065757 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:48Z","lastTransitionTime":"2026-02-02T10:54:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.170080 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.170126 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.170142 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.170165 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.170183 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:48Z","lastTransitionTime":"2026-02-02T10:54:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.273900 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.273961 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.273977 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.274024 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.274097 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:48Z","lastTransitionTime":"2026-02-02T10:54:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.378404 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.378499 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.378527 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.378585 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.378653 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:48Z","lastTransitionTime":"2026-02-02T10:54:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.481984 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.482039 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.482053 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.482074 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.482091 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:48Z","lastTransitionTime":"2026-02-02T10:54:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.491701 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 08:59:21.983852678 +0000 UTC Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.505300 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:54:48 crc kubenswrapper[4838]: E0202 10:54:48.505535 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.585368 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.585463 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.585482 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.585509 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.585528 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:48Z","lastTransitionTime":"2026-02-02T10:54:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.689229 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.689299 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.689322 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.689353 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.689374 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:48Z","lastTransitionTime":"2026-02-02T10:54:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.792968 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.793052 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.793091 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.793130 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.793154 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:48Z","lastTransitionTime":"2026-02-02T10:54:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.897006 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.897091 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.897113 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.897139 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.897159 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:48Z","lastTransitionTime":"2026-02-02T10:54:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.999866 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.999934 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:48 crc kubenswrapper[4838]: I0202 10:54:48.999951 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:48.999977 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.000007 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:48Z","lastTransitionTime":"2026-02-02T10:54:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.104089 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.104463 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.104674 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.104845 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.105054 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:49Z","lastTransitionTime":"2026-02-02T10:54:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.208345 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.208699 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.208848 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.208993 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.209160 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:49Z","lastTransitionTime":"2026-02-02T10:54:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.312346 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.312401 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.312420 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.312445 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.312465 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:49Z","lastTransitionTime":"2026-02-02T10:54:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.415728 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.415786 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.415801 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.415826 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.415840 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:49Z","lastTransitionTime":"2026-02-02T10:54:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.492020 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 03:11:14.031627438 +0000 UTC Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.505445 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.505470 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:54:49 crc kubenswrapper[4838]: E0202 10:54:49.506173 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:54:49 crc kubenswrapper[4838]: E0202 10:54:49.505975 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.505522 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:49 crc kubenswrapper[4838]: E0202 10:54:49.506384 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.519197 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.519268 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.519294 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.519327 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.519351 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:49Z","lastTransitionTime":"2026-02-02T10:54:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.622342 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.622391 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.622403 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.622421 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.622432 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:49Z","lastTransitionTime":"2026-02-02T10:54:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.725374 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.725449 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.725475 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.725506 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.725531 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:49Z","lastTransitionTime":"2026-02-02T10:54:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.829161 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.829230 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.829249 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.829273 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.829290 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:49Z","lastTransitionTime":"2026-02-02T10:54:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.932677 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.932817 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.932840 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.932872 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:49 crc kubenswrapper[4838]: I0202 10:54:49.932894 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:49Z","lastTransitionTime":"2026-02-02T10:54:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.035694 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.035758 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.035776 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.035801 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.035819 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:50Z","lastTransitionTime":"2026-02-02T10:54:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.139863 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.139922 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.139939 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.139965 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.139983 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:50Z","lastTransitionTime":"2026-02-02T10:54:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.243784 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.243856 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.243890 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.243919 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.243941 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:50Z","lastTransitionTime":"2026-02-02T10:54:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.347224 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.347308 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.347337 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.347364 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.347384 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:50Z","lastTransitionTime":"2026-02-02T10:54:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.450031 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.450104 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.450128 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.450157 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.450181 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:50Z","lastTransitionTime":"2026-02-02T10:54:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.492846 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 12:49:43.409162095 +0000 UTC Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.505382 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:54:50 crc kubenswrapper[4838]: E0202 10:54:50.505603 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.553422 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.553497 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.553517 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.553544 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.553563 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:50Z","lastTransitionTime":"2026-02-02T10:54:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.656100 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.656376 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.656472 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.656564 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.656694 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:50Z","lastTransitionTime":"2026-02-02T10:54:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.759718 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.759788 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.759806 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.759835 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.759852 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:50Z","lastTransitionTime":"2026-02-02T10:54:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.862290 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.862547 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.862636 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.862716 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.862826 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:50Z","lastTransitionTime":"2026-02-02T10:54:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.966111 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.966193 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.966210 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.966238 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:50 crc kubenswrapper[4838]: I0202 10:54:50.966256 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:50Z","lastTransitionTime":"2026-02-02T10:54:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.069796 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.069840 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.069856 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.069877 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.069892 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:51Z","lastTransitionTime":"2026-02-02T10:54:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.173033 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.173091 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.173104 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.173123 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.173136 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:51Z","lastTransitionTime":"2026-02-02T10:54:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.276576 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.276689 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.276712 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.276738 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.276774 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:51Z","lastTransitionTime":"2026-02-02T10:54:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.340494 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.340716 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:54:51 crc kubenswrapper[4838]: E0202 10:54:51.340821 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:55.340784018 +0000 UTC m=+149.677885086 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:54:51 crc kubenswrapper[4838]: E0202 10:54:51.340826 4838 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.340960 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:54:51 crc kubenswrapper[4838]: E0202 10:54:51.341049 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 10:55:55.341009884 +0000 UTC m=+149.678110952 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 10:54:51 crc kubenswrapper[4838]: E0202 10:54:51.341061 4838 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 10:54:51 crc kubenswrapper[4838]: E0202 10:54:51.341126 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 10:55:55.341105777 +0000 UTC m=+149.678206815 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.379662 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.379709 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.379723 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.379740 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.379751 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:51Z","lastTransitionTime":"2026-02-02T10:54:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.441838 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.441910 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:54:51 crc kubenswrapper[4838]: E0202 10:54:51.442078 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 10:54:51 crc kubenswrapper[4838]: E0202 10:54:51.442107 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 10:54:51 crc kubenswrapper[4838]: E0202 10:54:51.442121 4838 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 10:54:51 crc kubenswrapper[4838]: E0202 10:54:51.442116 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 10:54:51 crc kubenswrapper[4838]: E0202 10:54:51.442170 4838 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 10:54:51 crc kubenswrapper[4838]: E0202 10:54:51.442188 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 10:55:55.442168353 +0000 UTC m=+149.779269471 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 10:54:51 crc kubenswrapper[4838]: E0202 10:54:51.442192 4838 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 10:54:51 crc kubenswrapper[4838]: E0202 10:54:51.442429 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 10:55:55.44240377 +0000 UTC m=+149.779504838 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.482223 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.482268 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.482280 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.482298 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.482310 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:51Z","lastTransitionTime":"2026-02-02T10:54:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.493702 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 07:51:47.647793222 +0000 UTC Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.505571 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.505608 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:51 crc kubenswrapper[4838]: E0202 10:54:51.505714 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.505730 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:54:51 crc kubenswrapper[4838]: E0202 10:54:51.505871 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba" Feb 02 10:54:51 crc kubenswrapper[4838]: E0202 10:54:51.505933 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.584707 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.584747 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.584758 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.584773 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.584785 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:51Z","lastTransitionTime":"2026-02-02T10:54:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.687682 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.687738 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.687756 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.687780 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.687799 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:51Z","lastTransitionTime":"2026-02-02T10:54:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.790733 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.790803 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.790821 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.790845 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.790863 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:51Z","lastTransitionTime":"2026-02-02T10:54:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.893880 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.893957 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.893978 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.894001 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.894019 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:51Z","lastTransitionTime":"2026-02-02T10:54:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.996758 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.996816 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.996828 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.996846 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:51 crc kubenswrapper[4838]: I0202 10:54:51.996862 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:51Z","lastTransitionTime":"2026-02-02T10:54:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.099878 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.100090 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.100102 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.100122 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.100136 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:52Z","lastTransitionTime":"2026-02-02T10:54:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.202756 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.202815 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.202840 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.202883 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.202903 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:52Z","lastTransitionTime":"2026-02-02T10:54:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.305786 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.305847 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.305895 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.305924 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.305948 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:52Z","lastTransitionTime":"2026-02-02T10:54:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.408870 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.408937 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.408960 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.408991 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.409013 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:52Z","lastTransitionTime":"2026-02-02T10:54:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.494575 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 22:41:13.077839842 +0000 UTC Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.505017 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:54:52 crc kubenswrapper[4838]: E0202 10:54:52.505206 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.512066 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.512184 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.512203 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.512229 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.512247 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:52Z","lastTransitionTime":"2026-02-02T10:54:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.615286 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.615387 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.615404 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.615426 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.615446 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:52Z","lastTransitionTime":"2026-02-02T10:54:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.719003 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.719083 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.719108 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.719140 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.719168 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:52Z","lastTransitionTime":"2026-02-02T10:54:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.821585 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.821748 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.821773 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.821799 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.821816 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:52Z","lastTransitionTime":"2026-02-02T10:54:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.925181 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.925275 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.925300 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.925328 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:52 crc kubenswrapper[4838]: I0202 10:54:52.925350 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:52Z","lastTransitionTime":"2026-02-02T10:54:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.028338 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.028394 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.028411 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.028439 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.028455 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:53Z","lastTransitionTime":"2026-02-02T10:54:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.131889 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.131961 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.131976 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.132006 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.132018 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:53Z","lastTransitionTime":"2026-02-02T10:54:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.235336 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.235374 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.235390 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.235432 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.235449 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:53Z","lastTransitionTime":"2026-02-02T10:54:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.338368 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.338440 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.338466 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.338496 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.338520 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:53Z","lastTransitionTime":"2026-02-02T10:54:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.441008 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.441074 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.441092 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.441118 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.441136 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:53Z","lastTransitionTime":"2026-02-02T10:54:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.495099 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 11:09:58.411821424 +0000 UTC Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.505542 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.505967 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.506127 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:54:53 crc kubenswrapper[4838]: E0202 10:54:53.506120 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba" Feb 02 10:54:53 crc kubenswrapper[4838]: E0202 10:54:53.506456 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:54:53 crc kubenswrapper[4838]: E0202 10:54:53.506709 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.545524 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.545563 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.545575 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.545593 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.545604 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:53Z","lastTransitionTime":"2026-02-02T10:54:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.648532 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.648590 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.648642 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.648672 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.648692 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:53Z","lastTransitionTime":"2026-02-02T10:54:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.751254 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.751316 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.751333 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.751356 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.751373 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:53Z","lastTransitionTime":"2026-02-02T10:54:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.854441 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.854496 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.854513 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.854538 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.854556 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:53Z","lastTransitionTime":"2026-02-02T10:54:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.956884 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.956942 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.956964 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.956992 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:53 crc kubenswrapper[4838]: I0202 10:54:53.957014 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:53Z","lastTransitionTime":"2026-02-02T10:54:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.060205 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.060264 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.060296 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.060336 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.060360 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:54Z","lastTransitionTime":"2026-02-02T10:54:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.163463 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.163533 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.163580 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.163612 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.163704 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:54Z","lastTransitionTime":"2026-02-02T10:54:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.266868 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.266926 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.266944 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.266971 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.266990 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:54Z","lastTransitionTime":"2026-02-02T10:54:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.370318 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.370344 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.370354 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.370368 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.370378 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:54Z","lastTransitionTime":"2026-02-02T10:54:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.473224 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.473329 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.473344 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.473366 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.473379 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:54Z","lastTransitionTime":"2026-02-02T10:54:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.495532 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 13:51:26.088280661 +0000 UTC Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.506056 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:54:54 crc kubenswrapper[4838]: E0202 10:54:54.506246 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.576349 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.576398 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.576418 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.576440 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.576455 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:54Z","lastTransitionTime":"2026-02-02T10:54:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.607448 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.607513 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.607533 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.607563 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.607584 4838 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T10:54:54Z","lastTransitionTime":"2026-02-02T10:54:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.676991 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jx65"]
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.677558 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jx65"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.680387 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.680600 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.680775 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.682402 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.700827 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podStartSLOduration=64.700796065 podStartE2EDuration="1m4.700796065s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:54:54.70025379 +0000 UTC m=+89.037354848" watchObservedRunningTime="2026-02-02 10:54:54.700796065 +0000 UTC m=+89.037897133"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.773723 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=68.77369472 podStartE2EDuration="1m8.77369472s" podCreationTimestamp="2026-02-02 10:53:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:54:54.757118858 +0000 UTC m=+89.094219936" watchObservedRunningTime="2026-02-02 10:54:54.77369472 +0000 UTC m=+89.110795778"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.774445 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=36.774434251 podStartE2EDuration="36.774434251s" podCreationTimestamp="2026-02-02 10:54:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:54:54.773535635 +0000 UTC m=+89.110636703" watchObservedRunningTime="2026-02-02 10:54:54.774434251 +0000 UTC m=+89.111535319"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.780018 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/7b92d62c-fb59-4b70-a267-2b6b56e4870c-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9jx65\" (UID: \"7b92d62c-fb59-4b70-a267-2b6b56e4870c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jx65"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.780096 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7b92d62c-fb59-4b70-a267-2b6b56e4870c-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9jx65\" (UID: \"7b92d62c-fb59-4b70-a267-2b6b56e4870c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jx65"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.780138 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7b92d62c-fb59-4b70-a267-2b6b56e4870c-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9jx65\" (UID: \"7b92d62c-fb59-4b70-a267-2b6b56e4870c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jx65"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.780236 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/7b92d62c-fb59-4b70-a267-2b6b56e4870c-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-9jx65\" (UID: \"7b92d62c-fb59-4b70-a267-2b6b56e4870c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jx65"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.780261 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b92d62c-fb59-4b70-a267-2b6b56e4870c-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9jx65\" (UID: \"7b92d62c-fb59-4b70-a267-2b6b56e4870c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jx65"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.809854 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-mz9jt" podStartSLOduration=64.809584371 podStartE2EDuration="1m4.809584371s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:54:54.804368623 +0000 UTC m=+89.141469681" watchObservedRunningTime="2026-02-02 10:54:54.809584371 +0000 UTC m=+89.146685399"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.845069 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-xrkv9" podStartSLOduration=64.84504505 podStartE2EDuration="1m4.84504505s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:54:54.828552371 +0000 UTC m=+89.165653419" watchObservedRunningTime="2026-02-02 10:54:54.84504505 +0000 UTC m=+89.182146078"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.862324 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-ftlpr" podStartSLOduration=64.862294151 podStartE2EDuration="1m4.862294151s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:54:54.845475472 +0000 UTC m=+89.182576500" watchObservedRunningTime="2026-02-02 10:54:54.862294151 +0000 UTC m=+89.199395219"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.862724 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-ndxhv" podStartSLOduration=64.862714043 podStartE2EDuration="1m4.862714043s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:54:54.861881439 +0000 UTC m=+89.198982507" watchObservedRunningTime="2026-02-02 10:54:54.862714043 +0000 UTC m=+89.199815101"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.875169 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4rcnf" podStartSLOduration=64.875133726 podStartE2EDuration="1m4.875133726s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:54:54.874039785 +0000 UTC m=+89.211140803" watchObservedRunningTime="2026-02-02 10:54:54.875133726 +0000 UTC m=+89.212234774"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.885361 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7b92d62c-fb59-4b70-a267-2b6b56e4870c-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9jx65\" (UID: \"7b92d62c-fb59-4b70-a267-2b6b56e4870c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jx65"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.885491 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/7b92d62c-fb59-4b70-a267-2b6b56e4870c-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-9jx65\" (UID: \"7b92d62c-fb59-4b70-a267-2b6b56e4870c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jx65"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.885530 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b92d62c-fb59-4b70-a267-2b6b56e4870c-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9jx65\" (UID: \"7b92d62c-fb59-4b70-a267-2b6b56e4870c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jx65"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.885609 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/7b92d62c-fb59-4b70-a267-2b6b56e4870c-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9jx65\" (UID: \"7b92d62c-fb59-4b70-a267-2b6b56e4870c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jx65"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.885723 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7b92d62c-fb59-4b70-a267-2b6b56e4870c-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9jx65\" (UID: \"7b92d62c-fb59-4b70-a267-2b6b56e4870c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jx65"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.886321 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/7b92d62c-fb59-4b70-a267-2b6b56e4870c-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-9jx65\" (UID: \"7b92d62c-fb59-4b70-a267-2b6b56e4870c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jx65"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.886534 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/7b92d62c-fb59-4b70-a267-2b6b56e4870c-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9jx65\" (UID: \"7b92d62c-fb59-4b70-a267-2b6b56e4870c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jx65"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.887118 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7b92d62c-fb59-4b70-a267-2b6b56e4870c-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9jx65\" (UID: \"7b92d62c-fb59-4b70-a267-2b6b56e4870c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jx65"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.896440 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b92d62c-fb59-4b70-a267-2b6b56e4870c-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9jx65\" (UID: \"7b92d62c-fb59-4b70-a267-2b6b56e4870c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jx65"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.921459 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7b92d62c-fb59-4b70-a267-2b6b56e4870c-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9jx65\" (UID: \"7b92d62c-fb59-4b70-a267-2b6b56e4870c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jx65"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.958515 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=63.958493089 podStartE2EDuration="1m3.958493089s" podCreationTimestamp="2026-02-02 10:53:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:54:54.958070737 +0000 UTC m=+89.295171775" watchObservedRunningTime="2026-02-02 10:54:54.958493089 +0000 UTC m=+89.295594127"
Feb 02 10:54:54 crc kubenswrapper[4838]: I0202 10:54:54.969392 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=11.969369128 podStartE2EDuration="11.969369128s" podCreationTimestamp="2026-02-02 10:54:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:54:54.968584916 +0000 UTC m=+89.305685954" watchObservedRunningTime="2026-02-02 10:54:54.969369128 +0000 UTC m=+89.306470156"
Feb 02 10:54:55 crc kubenswrapper[4838]: I0202 10:54:55.005912 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jx65"
Feb 02 10:54:55 crc kubenswrapper[4838]: I0202 10:54:55.091288 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jx65" event={"ID":"7b92d62c-fb59-4b70-a267-2b6b56e4870c","Type":"ContainerStarted","Data":"ec25645102c9a84f63891fb8b63085145e280603bb9f1138775d926e6caeef5a"}
Feb 02 10:54:55 crc kubenswrapper[4838]: I0202 10:54:55.496452 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 10:22:50.797993798 +0000 UTC
Feb 02 10:54:55 crc kubenswrapper[4838]: I0202 10:54:55.497829 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates
Feb 02 10:54:55 crc kubenswrapper[4838]: I0202 10:54:55.504998 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:54:55 crc kubenswrapper[4838]: I0202 10:54:55.505301 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:54:55 crc kubenswrapper[4838]: E0202 10:54:55.505500 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 10:54:55 crc kubenswrapper[4838]: I0202 10:54:55.505207 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:54:55 crc kubenswrapper[4838]: E0202 10:54:55.505673 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:54:55 crc kubenswrapper[4838]: E0202 10:54:55.506000 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba"
Feb 02 10:54:55 crc kubenswrapper[4838]: I0202 10:54:55.513954 4838 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146
Feb 02 10:54:56 crc kubenswrapper[4838]: I0202 10:54:56.095809 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jx65" event={"ID":"7b92d62c-fb59-4b70-a267-2b6b56e4870c","Type":"ContainerStarted","Data":"64a61f177022e329e974110b54697ba684a0d37536e264571bc7879178f0d203"}
Feb 02 10:54:56 crc kubenswrapper[4838]: I0202 10:54:56.113403 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jx65" podStartSLOduration=66.113380234 podStartE2EDuration="1m6.113380234s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:54:56.112848939 +0000 UTC m=+90.449949967" watchObservedRunningTime="2026-02-02 10:54:56.113380234 +0000 UTC m=+90.450481272"
Feb 02 10:54:56 crc kubenswrapper[4838]: I0202 10:54:56.505332 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:54:56 crc kubenswrapper[4838]: E0202 10:54:56.506088 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 10:54:57 crc kubenswrapper[4838]: I0202 10:54:57.505586 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:54:57 crc kubenswrapper[4838]: I0202 10:54:57.505706 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:54:57 crc kubenswrapper[4838]: E0202 10:54:57.505840 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:54:57 crc kubenswrapper[4838]: E0202 10:54:57.506298 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba"
Feb 02 10:54:57 crc kubenswrapper[4838]: I0202 10:54:57.507394 4838 scope.go:117] "RemoveContainer" containerID="f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588"
Feb 02 10:54:57 crc kubenswrapper[4838]: E0202 10:54:57.507612 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-66l9c_openshift-ovn-kubernetes(9bc00b9c-6e31-4f8e-b4ba-44150281ed69)\"" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69"
Feb 02 10:54:57 crc kubenswrapper[4838]: I0202 10:54:57.507933 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:54:57 crc kubenswrapper[4838]: E0202 10:54:57.508289 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 10:54:58 crc kubenswrapper[4838]: I0202 10:54:58.504886 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:54:58 crc kubenswrapper[4838]: E0202 10:54:58.505039 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 10:54:59 crc kubenswrapper[4838]: I0202 10:54:59.505768 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:54:59 crc kubenswrapper[4838]: E0202 10:54:59.505899 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba"
Feb 02 10:54:59 crc kubenswrapper[4838]: I0202 10:54:59.505923 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:54:59 crc kubenswrapper[4838]: I0202 10:54:59.506051 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:54:59 crc kubenswrapper[4838]: E0202 10:54:59.506123 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:54:59 crc kubenswrapper[4838]: E0202 10:54:59.506260 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 10:55:00 crc kubenswrapper[4838]: I0202 10:55:00.505155 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:55:00 crc kubenswrapper[4838]: E0202 10:55:00.505356 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 10:55:01 crc kubenswrapper[4838]: I0202 10:55:01.505587 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:55:01 crc kubenswrapper[4838]: E0202 10:55:01.505707 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:55:01 crc kubenswrapper[4838]: I0202 10:55:01.505697 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:55:01 crc kubenswrapper[4838]: I0202 10:55:01.505733 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:55:01 crc kubenswrapper[4838]: E0202 10:55:01.505917 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba"
Feb 02 10:55:01 crc kubenswrapper[4838]: E0202 10:55:01.506146 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 10:55:02 crc kubenswrapper[4838]: I0202 10:55:02.505210 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:55:02 crc kubenswrapper[4838]: E0202 10:55:02.506131 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 10:55:03 crc kubenswrapper[4838]: I0202 10:55:03.505822 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:55:03 crc kubenswrapper[4838]: I0202 10:55:03.505863 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:55:03 crc kubenswrapper[4838]: I0202 10:55:03.505939 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:55:03 crc kubenswrapper[4838]: E0202 10:55:03.506007 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:55:03 crc kubenswrapper[4838]: E0202 10:55:03.506232 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 10:55:03 crc kubenswrapper[4838]: E0202 10:55:03.506325 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba"
Feb 02 10:55:04 crc kubenswrapper[4838]: I0202 10:55:04.505162 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:55:04 crc kubenswrapper[4838]: E0202 10:55:04.505399 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 10:55:05 crc kubenswrapper[4838]: I0202 10:55:05.505207 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:55:05 crc kubenswrapper[4838]: E0202 10:55:05.505423 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:55:05 crc kubenswrapper[4838]: I0202 10:55:05.505236 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:55:05 crc kubenswrapper[4838]: I0202 10:55:05.505722 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:55:05 crc kubenswrapper[4838]: E0202 10:55:05.505811 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 10:55:05 crc kubenswrapper[4838]: E0202 10:55:05.506153 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba"
Feb 02 10:55:06 crc kubenswrapper[4838]: I0202 10:55:06.505651 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:55:06 crc kubenswrapper[4838]: E0202 10:55:06.508026 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 10:55:06 crc kubenswrapper[4838]: I0202 10:55:06.533385 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"]
Feb 02 10:55:07 crc kubenswrapper[4838]: I0202 10:55:07.505814 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:55:07 crc kubenswrapper[4838]: I0202 10:55:07.505867 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:55:07 crc kubenswrapper[4838]: I0202 10:55:07.505868 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:55:07 crc kubenswrapper[4838]: E0202 10:55:07.507153 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba"
Feb 02 10:55:07 crc kubenswrapper[4838]: E0202 10:55:07.507295 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:55:07 crc kubenswrapper[4838]: E0202 10:55:07.507322 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 10:55:08 crc kubenswrapper[4838]: I0202 10:55:08.505537 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:55:08 crc kubenswrapper[4838]: E0202 10:55:08.505790 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 10:55:09 crc kubenswrapper[4838]: I0202 10:55:09.452468 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs\") pod \"network-metrics-daemon-kdnnp\" (UID: \"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\") " pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:55:09 crc kubenswrapper[4838]: E0202 10:55:09.452766 4838 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Feb 02 10:55:09 crc kubenswrapper[4838]: E0202 10:55:09.452844 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs podName:c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba nodeName:}" failed. No retries permitted until 2026-02-02 10:56:13.452821653 +0000 UTC m=+167.789922721 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs") pod "network-metrics-daemon-kdnnp" (UID: "c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba") : object "openshift-multus"/"metrics-daemon-secret" not registered
Feb 02 10:55:09 crc kubenswrapper[4838]: I0202 10:55:09.505823 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:55:09 crc kubenswrapper[4838]: I0202 10:55:09.505823 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:55:09 crc kubenswrapper[4838]: I0202 10:55:09.505893 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:55:09 crc kubenswrapper[4838]: E0202 10:55:09.506369 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:55:09 crc kubenswrapper[4838]: E0202 10:55:09.506348 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 10:55:09 crc kubenswrapper[4838]: E0202 10:55:09.506602 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba"
Feb 02 10:55:10 crc kubenswrapper[4838]: I0202 10:55:10.505180 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:55:10 crc kubenswrapper[4838]: E0202 10:55:10.506309 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 10:55:10 crc kubenswrapper[4838]: I0202 10:55:10.506678 4838 scope.go:117] "RemoveContainer" containerID="f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588"
Feb 02 10:55:10 crc kubenswrapper[4838]: E0202 10:55:10.506945 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-66l9c_openshift-ovn-kubernetes(9bc00b9c-6e31-4f8e-b4ba-44150281ed69)\"" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69"
Feb 02 10:55:11 crc kubenswrapper[4838]: I0202 10:55:11.505497 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:55:11 crc kubenswrapper[4838]: E0202 10:55:11.506263 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:55:11 crc kubenswrapper[4838]: I0202 10:55:11.505698 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:55:11 crc kubenswrapper[4838]: E0202 10:55:11.506535 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba"
Feb 02 10:55:11 crc kubenswrapper[4838]: I0202 10:55:11.505522 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:55:11 crc kubenswrapper[4838]: E0202 10:55:11.506811 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 10:55:12 crc kubenswrapper[4838]: I0202 10:55:12.505735 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:55:12 crc kubenswrapper[4838]: E0202 10:55:12.505900 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 10:55:13 crc kubenswrapper[4838]: I0202 10:55:13.505256 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:55:13 crc kubenswrapper[4838]: I0202 10:55:13.505329 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:55:13 crc kubenswrapper[4838]: I0202 10:55:13.505433 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:55:13 crc kubenswrapper[4838]: E0202 10:55:13.505657 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:55:13 crc kubenswrapper[4838]: E0202 10:55:13.506035 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba"
Feb 02 10:55:13 crc kubenswrapper[4838]: E0202 10:55:13.506345 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 10:55:14 crc kubenswrapper[4838]: I0202 10:55:14.505114 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:55:14 crc kubenswrapper[4838]: E0202 10:55:14.505299 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 10:55:15 crc kubenswrapper[4838]: I0202 10:55:15.505535 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:55:15 crc kubenswrapper[4838]: I0202 10:55:15.505602 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:55:15 crc kubenswrapper[4838]: I0202 10:55:15.505602 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:55:15 crc kubenswrapper[4838]: E0202 10:55:15.505693 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:55:15 crc kubenswrapper[4838]: E0202 10:55:15.505813 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 10:55:15 crc kubenswrapper[4838]: E0202 10:55:15.506273 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba"
Feb 02 10:55:16 crc kubenswrapper[4838]: I0202 10:55:16.505851 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:55:16 crc kubenswrapper[4838]: E0202 10:55:16.506186 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 10:55:16 crc kubenswrapper[4838]: I0202 10:55:16.554117 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=10.554094316 podStartE2EDuration="10.554094316s" podCreationTimestamp="2026-02-02 10:55:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:16.551544973 +0000 UTC m=+110.888646061" watchObservedRunningTime="2026-02-02 10:55:16.554094316 +0000 UTC m=+110.891195384"
Feb 02 10:55:17 crc kubenswrapper[4838]: I0202 10:55:17.505890 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:55:17 crc kubenswrapper[4838]: I0202 10:55:17.507141 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:55:17 crc kubenswrapper[4838]: I0202 10:55:17.507467 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:55:17 crc kubenswrapper[4838]: E0202 10:55:17.507665 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:55:17 crc kubenswrapper[4838]: E0202 10:55:17.507457 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba"
Feb 02 10:55:17 crc kubenswrapper[4838]: E0202 10:55:17.508101 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 10:55:18 crc kubenswrapper[4838]: I0202 10:55:18.505164 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:55:18 crc kubenswrapper[4838]: E0202 10:55:18.505340 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 10:55:19 crc kubenswrapper[4838]: I0202 10:55:19.504954 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:55:19 crc kubenswrapper[4838]: I0202 10:55:19.504975 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:55:19 crc kubenswrapper[4838]: E0202 10:55:19.505058 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 10:55:19 crc kubenswrapper[4838]: I0202 10:55:19.505173 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:55:19 crc kubenswrapper[4838]: E0202 10:55:19.505211 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:55:19 crc kubenswrapper[4838]: E0202 10:55:19.505382 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba"
Feb 02 10:55:20 crc kubenswrapper[4838]: I0202 10:55:20.505986 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:55:20 crc kubenswrapper[4838]: E0202 10:55:20.506187 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 10:55:21 crc kubenswrapper[4838]: I0202 10:55:21.505123 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:55:21 crc kubenswrapper[4838]: I0202 10:55:21.505229 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:55:21 crc kubenswrapper[4838]: I0202 10:55:21.505139 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:55:21 crc kubenswrapper[4838]: E0202 10:55:21.505385 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:55:21 crc kubenswrapper[4838]: E0202 10:55:21.505512 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba"
Feb 02 10:55:21 crc kubenswrapper[4838]: E0202 10:55:21.505643 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 10:55:22 crc kubenswrapper[4838]: I0202 10:55:22.505764 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:55:22 crc kubenswrapper[4838]: E0202 10:55:22.505932 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 10:55:23 crc kubenswrapper[4838]: I0202 10:55:23.505147 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:55:23 crc kubenswrapper[4838]: I0202 10:55:23.505281 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:55:23 crc kubenswrapper[4838]: E0202 10:55:23.505326 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba"
Feb 02 10:55:23 crc kubenswrapper[4838]: I0202 10:55:23.505380 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:55:23 crc kubenswrapper[4838]: E0202 10:55:23.506082 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 10:55:23 crc kubenswrapper[4838]: E0202 10:55:23.506131 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:55:23 crc kubenswrapper[4838]: I0202 10:55:23.506610 4838 scope.go:117] "RemoveContainer" containerID="f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588"
Feb 02 10:55:23 crc kubenswrapper[4838]: E0202 10:55:23.506903 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-66l9c_openshift-ovn-kubernetes(9bc00b9c-6e31-4f8e-b4ba-44150281ed69)\"" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69"
Feb 02 10:55:24 crc kubenswrapper[4838]: I0202 10:55:24.505957 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:55:24 crc kubenswrapper[4838]: E0202 10:55:24.506448 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 10:55:25 crc kubenswrapper[4838]: I0202 10:55:25.198901 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ndxhv_ddc2e893-5801-4e73-a5f6-9cc52f733f49/kube-multus/1.log"
Feb 02 10:55:25 crc kubenswrapper[4838]: I0202 10:55:25.200155 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ndxhv_ddc2e893-5801-4e73-a5f6-9cc52f733f49/kube-multus/0.log"
Feb 02 10:55:25 crc kubenswrapper[4838]: I0202 10:55:25.200305 4838 generic.go:334] "Generic (PLEG): container finished" podID="ddc2e893-5801-4e73-a5f6-9cc52f733f49" containerID="94192e5199deb1455cbc4f14f600e68e7432dc6e1b246d174b7a4cc4b4359770" exitCode=1
Feb 02 10:55:25 crc kubenswrapper[4838]: I0202 10:55:25.200377 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-ndxhv" event={"ID":"ddc2e893-5801-4e73-a5f6-9cc52f733f49","Type":"ContainerDied","Data":"94192e5199deb1455cbc4f14f600e68e7432dc6e1b246d174b7a4cc4b4359770"}
Feb 02 10:55:25 crc kubenswrapper[4838]: I0202 10:55:25.200437 4838 scope.go:117] "RemoveContainer" containerID="479f481277058cfd72cbcc8d46cdf9a8a6506922772d4894bf5d0782fd4e9108"
Feb 02 10:55:25 crc kubenswrapper[4838]: I0202 10:55:25.201148 4838 scope.go:117] "RemoveContainer" containerID="94192e5199deb1455cbc4f14f600e68e7432dc6e1b246d174b7a4cc4b4359770"
Feb 02 10:55:25 crc kubenswrapper[4838]: E0202 10:55:25.201521 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-ndxhv_openshift-multus(ddc2e893-5801-4e73-a5f6-9cc52f733f49)\"" pod="openshift-multus/multus-ndxhv" podUID="ddc2e893-5801-4e73-a5f6-9cc52f733f49"
Feb 02 10:55:25 crc kubenswrapper[4838]: I0202 10:55:25.505216 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:55:25 crc kubenswrapper[4838]: I0202 10:55:25.505314 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:55:25 crc kubenswrapper[4838]: E0202 10:55:25.505410 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:55:25 crc kubenswrapper[4838]: I0202 10:55:25.505316 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:55:25 crc kubenswrapper[4838]: E0202 10:55:25.505567 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba"
Feb 02 10:55:25 crc kubenswrapper[4838]: E0202 10:55:25.505759 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 10:55:26 crc kubenswrapper[4838]: I0202 10:55:26.206692 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ndxhv_ddc2e893-5801-4e73-a5f6-9cc52f733f49/kube-multus/1.log"
Feb 02 10:55:26 crc kubenswrapper[4838]: I0202 10:55:26.504938 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:55:26 crc kubenswrapper[4838]: E0202 10:55:26.507411 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 10:55:26 crc kubenswrapper[4838]: E0202 10:55:26.517342 4838 kubelet_node_status.go:497] "Node not becoming ready in time after startup"
Feb 02 10:55:26 crc kubenswrapper[4838]: E0202 10:55:26.675522 4838 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Feb 02 10:55:27 crc kubenswrapper[4838]: I0202 10:55:27.505579 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:55:27 crc kubenswrapper[4838]: I0202 10:55:27.505668 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:55:27 crc kubenswrapper[4838]: I0202 10:55:27.505671 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:55:27 crc kubenswrapper[4838]: E0202 10:55:27.505832 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba"
Feb 02 10:55:27 crc kubenswrapper[4838]: E0202 10:55:27.505921 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 10:55:27 crc kubenswrapper[4838]: E0202 10:55:27.506066 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:55:28 crc kubenswrapper[4838]: I0202 10:55:28.504908 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:55:28 crc kubenswrapper[4838]: E0202 10:55:28.505111 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 10:55:29 crc kubenswrapper[4838]: I0202 10:55:29.505230 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:55:29 crc kubenswrapper[4838]: I0202 10:55:29.505334 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:55:29 crc kubenswrapper[4838]: I0202 10:55:29.505375 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:55:29 crc kubenswrapper[4838]: E0202 10:55:29.505577 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba"
Feb 02 10:55:29 crc kubenswrapper[4838]: E0202 10:55:29.505714 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:55:29 crc kubenswrapper[4838]: E0202 10:55:29.505906 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 10:55:30 crc kubenswrapper[4838]: I0202 10:55:30.505545 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:55:30 crc kubenswrapper[4838]: E0202 10:55:30.505785 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 10:55:31 crc kubenswrapper[4838]: I0202 10:55:31.505538 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:55:31 crc kubenswrapper[4838]: I0202 10:55:31.505849 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp"
Feb 02 10:55:31 crc kubenswrapper[4838]: I0202 10:55:31.505558 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:55:31 crc kubenswrapper[4838]: E0202 10:55:31.505943 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 10:55:31 crc kubenswrapper[4838]: E0202 10:55:31.506031 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba" Feb 02 10:55:31 crc kubenswrapper[4838]: E0202 10:55:31.506105 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:55:31 crc kubenswrapper[4838]: E0202 10:55:31.676995 4838 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 02 10:55:32 crc kubenswrapper[4838]: I0202 10:55:32.505284 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:55:32 crc kubenswrapper[4838]: E0202 10:55:32.505492 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:55:33 crc kubenswrapper[4838]: I0202 10:55:33.505075 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:55:33 crc kubenswrapper[4838]: I0202 10:55:33.505206 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:55:33 crc kubenswrapper[4838]: I0202 10:55:33.505300 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:55:33 crc kubenswrapper[4838]: E0202 10:55:33.505464 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:55:33 crc kubenswrapper[4838]: E0202 10:55:33.505607 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba" Feb 02 10:55:33 crc kubenswrapper[4838]: E0202 10:55:33.506040 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:55:34 crc kubenswrapper[4838]: I0202 10:55:34.505273 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:55:34 crc kubenswrapper[4838]: E0202 10:55:34.505602 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:55:34 crc kubenswrapper[4838]: I0202 10:55:34.507105 4838 scope.go:117] "RemoveContainer" containerID="f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588" Feb 02 10:55:35 crc kubenswrapper[4838]: I0202 10:55:35.243597 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-66l9c_9bc00b9c-6e31-4f8e-b4ba-44150281ed69/ovnkube-controller/3.log" Feb 02 10:55:35 crc kubenswrapper[4838]: I0202 10:55:35.246699 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerStarted","Data":"56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d"} Feb 02 10:55:35 crc kubenswrapper[4838]: I0202 10:55:35.247879 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" Feb 02 10:55:35 crc kubenswrapper[4838]: I0202 10:55:35.293666 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" podStartSLOduration=105.293649166 podStartE2EDuration="1m45.293649166s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:35.293347748 +0000 UTC m=+129.630448776" watchObservedRunningTime="2026-02-02 10:55:35.293649166 +0000 UTC m=+129.630750204" Feb 02 10:55:35 crc kubenswrapper[4838]: I0202 10:55:35.505533 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:55:35 crc kubenswrapper[4838]: E0202 10:55:35.505772 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:55:35 crc kubenswrapper[4838]: I0202 10:55:35.506093 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:55:35 crc kubenswrapper[4838]: E0202 10:55:35.506212 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:55:35 crc kubenswrapper[4838]: I0202 10:55:35.506497 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:55:35 crc kubenswrapper[4838]: E0202 10:55:35.506653 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba" Feb 02 10:55:35 crc kubenswrapper[4838]: I0202 10:55:35.648828 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-kdnnp"] Feb 02 10:55:36 crc kubenswrapper[4838]: I0202 10:55:36.251061 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:55:36 crc kubenswrapper[4838]: E0202 10:55:36.251716 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba" Feb 02 10:55:36 crc kubenswrapper[4838]: I0202 10:55:36.505500 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:55:36 crc kubenswrapper[4838]: E0202 10:55:36.507270 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:55:36 crc kubenswrapper[4838]: E0202 10:55:36.678066 4838 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 02 10:55:37 crc kubenswrapper[4838]: I0202 10:55:37.505488 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:55:37 crc kubenswrapper[4838]: I0202 10:55:37.505520 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:55:37 crc kubenswrapper[4838]: I0202 10:55:37.505557 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:55:37 crc kubenswrapper[4838]: E0202 10:55:37.505711 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:55:37 crc kubenswrapper[4838]: E0202 10:55:37.505829 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba" Feb 02 10:55:37 crc kubenswrapper[4838]: E0202 10:55:37.506037 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:55:38 crc kubenswrapper[4838]: I0202 10:55:38.505175 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:55:38 crc kubenswrapper[4838]: E0202 10:55:38.505374 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:55:38 crc kubenswrapper[4838]: I0202 10:55:38.505988 4838 scope.go:117] "RemoveContainer" containerID="94192e5199deb1455cbc4f14f600e68e7432dc6e1b246d174b7a4cc4b4359770" Feb 02 10:55:39 crc kubenswrapper[4838]: I0202 10:55:39.265263 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ndxhv_ddc2e893-5801-4e73-a5f6-9cc52f733f49/kube-multus/1.log" Feb 02 10:55:39 crc kubenswrapper[4838]: I0202 10:55:39.265786 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-ndxhv" event={"ID":"ddc2e893-5801-4e73-a5f6-9cc52f733f49","Type":"ContainerStarted","Data":"2ad4ae78e0719cb061a0ffafcdac325d6c137147b66ef29798b0a50c30341efc"} Feb 02 10:55:39 crc kubenswrapper[4838]: I0202 10:55:39.505858 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:55:39 crc kubenswrapper[4838]: I0202 10:55:39.505942 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:55:39 crc kubenswrapper[4838]: I0202 10:55:39.506078 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:55:39 crc kubenswrapper[4838]: E0202 10:55:39.506221 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:55:39 crc kubenswrapper[4838]: E0202 10:55:39.506541 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba" Feb 02 10:55:39 crc kubenswrapper[4838]: E0202 10:55:39.506798 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:55:40 crc kubenswrapper[4838]: I0202 10:55:40.506235 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:55:40 crc kubenswrapper[4838]: E0202 10:55:40.506556 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 10:55:41 crc kubenswrapper[4838]: I0202 10:55:41.505847 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:55:41 crc kubenswrapper[4838]: I0202 10:55:41.505910 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:55:41 crc kubenswrapper[4838]: I0202 10:55:41.505966 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:55:41 crc kubenswrapper[4838]: E0202 10:55:41.506304 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 10:55:41 crc kubenswrapper[4838]: E0202 10:55:41.506489 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 10:55:41 crc kubenswrapper[4838]: E0202 10:55:41.506778 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kdnnp" podUID="c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba" Feb 02 10:55:42 crc kubenswrapper[4838]: I0202 10:55:42.505473 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:55:42 crc kubenswrapper[4838]: I0202 10:55:42.514846 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Feb 02 10:55:42 crc kubenswrapper[4838]: I0202 10:55:42.517182 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Feb 02 10:55:43 crc kubenswrapper[4838]: I0202 10:55:43.505439 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:55:43 crc kubenswrapper[4838]: I0202 10:55:43.505454 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:55:43 crc kubenswrapper[4838]: I0202 10:55:43.505485 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:55:43 crc kubenswrapper[4838]: I0202 10:55:43.508213 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Feb 02 10:55:43 crc kubenswrapper[4838]: I0202 10:55:43.508582 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Feb 02 10:55:43 crc kubenswrapper[4838]: I0202 10:55:43.509033 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Feb 02 10:55:43 crc kubenswrapper[4838]: I0202 10:55:43.509211 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.023698 4838 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.077152 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-v475b"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.084950 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-v475b" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.093209 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.093951 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.094295 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.094919 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.096261 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.102735 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.110151 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-vh59q"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.110743 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh59q" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.113366 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-t2zxd"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.114063 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-t2zxd" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.118866 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.119156 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.119332 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.120018 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.120274 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.120555 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.120354 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.120431 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.121205 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.122146 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.122365 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.122545 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.125273 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hxwxd"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.125689 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-w48jn"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.126007 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.126474 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hxwxd" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.126007 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.127173 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.127470 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-dmwvh"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.128263 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dmwvh" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.128545 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-s7xdh"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.132021 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.132242 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.134867 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.135436 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.135687 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.135872 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.136266 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.136500 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.137027 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.140003 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-2qsm5"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.140282 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-mzp9s"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.140540 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-zfr2j"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.140830 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-s7xdh" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.141572 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.141699 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-j52s8"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.141760 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.141895 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.142037 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.142046 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-2l7rj"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.142243 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.142366 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.142727 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.143022 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-mzp9s" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.143242 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.143508 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-j52s8" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.143518 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.147874 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.147968 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.147999 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.148026 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.148066 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.147859 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.148209 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.148248 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.148257 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.149132 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.151468 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-dtdwq"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.152142 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-dtdwq" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.157002 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.157316 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.157549 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.157001 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.157910 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.163663 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.164411 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.175516 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hxwxd"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.175876 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.177715 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-v475b"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.177987 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.176380 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.184730 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.176485 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.177059 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.178191 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.178245 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.178294 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.178340 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.178406 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.179368 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.179385 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.179449 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.179827 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.179910 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.179969 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.180090 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.180449 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.180664 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.180729 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Feb 02 10:55:46 crc 
kubenswrapper[4838]: I0202 10:55:46.182960 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.183941 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.184068 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.184200 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.188828 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.196192 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.196366 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.197945 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.199820 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.199831 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-w48jn"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.199880 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200002 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200091 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200251 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b49e2d2f-5155-49bf-82f3-b68992ebe787-config\") pod \"machine-api-operator-5694c8668f-v475b\" (UID: \"b49e2d2f-5155-49bf-82f3-b68992ebe787\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-v475b" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200274 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxgwz\" (UniqueName: \"kubernetes.io/projected/634059df-4860-4872-9aae-3b71aa2d55b2-kube-api-access-xxgwz\") pod \"openshift-apiserver-operator-796bbdcf4f-hxwxd\" (UID: \"634059df-4860-4872-9aae-3b71aa2d55b2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hxwxd" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200294 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: 
\"kubernetes.io/secret/4186f5d8-e330-4fae-943e-a6abbdb49b96-encryption-config\") pod \"apiserver-7bbb656c7d-4tf2g\" (UID: \"4186f5d8-e330-4fae-943e-a6abbdb49b96\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200321 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4186f5d8-e330-4fae-943e-a6abbdb49b96-audit-dir\") pod \"apiserver-7bbb656c7d-4tf2g\" (UID: \"4186f5d8-e330-4fae-943e-a6abbdb49b96\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200336 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa-auth-proxy-config\") pod \"machine-approver-56656f9798-vh59q\" (UID: \"5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh59q" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200353 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03d2fa95-b476-4259-8d2d-69bd31c28da4-serving-cert\") pod \"openshift-config-operator-7777fb866f-dmwvh\" (UID: \"03d2fa95-b476-4259-8d2d-69bd31c28da4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dmwvh" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200368 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db54cce9-ff9d-4772-abf3-01f15ecb8075-config\") pod \"controller-manager-879f6c89f-w48jn\" (UID: \"db54cce9-ff9d-4772-abf3-01f15ecb8075\") " pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200384 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wf255\" (UniqueName: \"kubernetes.io/projected/03d2fa95-b476-4259-8d2d-69bd31c28da4-kube-api-access-wf255\") pod \"openshift-config-operator-7777fb866f-dmwvh\" (UID: \"03d2fa95-b476-4259-8d2d-69bd31c28da4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dmwvh" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200407 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/03d2fa95-b476-4259-8d2d-69bd31c28da4-available-featuregates\") pod \"openshift-config-operator-7777fb866f-dmwvh\" (UID: \"03d2fa95-b476-4259-8d2d-69bd31c28da4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dmwvh" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200421 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bsgc9\" (UniqueName: \"kubernetes.io/projected/b49e2d2f-5155-49bf-82f3-b68992ebe787-kube-api-access-bsgc9\") pod \"machine-api-operator-5694c8668f-v475b\" (UID: \"b49e2d2f-5155-49bf-82f3-b68992ebe787\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-v475b" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200437 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/4186f5d8-e330-4fae-943e-a6abbdb49b96-audit-policies\") pod \"apiserver-7bbb656c7d-4tf2g\" (UID: \"4186f5d8-e330-4fae-943e-a6abbdb49b96\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200452 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbdvv\" (UniqueName: \"kubernetes.io/projected/d74d39da-6fcf-437c-805b-ec416d09e348-kube-api-access-mbdvv\") pod \"authentication-operator-69f744f599-t2zxd\" (UID: \"d74d39da-6fcf-437c-805b-ec416d09e348\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t2zxd" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200473 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rd6d2\" (UniqueName: \"kubernetes.io/projected/5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa-kube-api-access-rd6d2\") pod \"machine-approver-56656f9798-vh59q\" (UID: \"5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh59q" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200491 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/b49e2d2f-5155-49bf-82f3-b68992ebe787-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-v475b\" (UID: \"b49e2d2f-5155-49bf-82f3-b68992ebe787\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-v475b" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200508 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4186f5d8-e330-4fae-943e-a6abbdb49b96-serving-cert\") pod \"apiserver-7bbb656c7d-4tf2g\" (UID: \"4186f5d8-e330-4fae-943e-a6abbdb49b96\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200523 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d74d39da-6fcf-437c-805b-ec416d09e348-config\") pod \"authentication-operator-69f744f599-t2zxd\" (UID: \"d74d39da-6fcf-437c-805b-ec416d09e348\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t2zxd" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200539 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4186f5d8-e330-4fae-943e-a6abbdb49b96-etcd-client\") pod \"apiserver-7bbb656c7d-4tf2g\" (UID: \"4186f5d8-e330-4fae-943e-a6abbdb49b96\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200565 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d74d39da-6fcf-437c-805b-ec416d09e348-serving-cert\") pod \"authentication-operator-69f744f599-t2zxd\" (UID: \"d74d39da-6fcf-437c-805b-ec416d09e348\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t2zxd" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200586 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/d74d39da-6fcf-437c-805b-ec416d09e348-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-t2zxd\" (UID: \"d74d39da-6fcf-437c-805b-ec416d09e348\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t2zxd" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200602 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/634059df-4860-4872-9aae-3b71aa2d55b2-config\") pod \"openshift-apiserver-operator-796bbdcf4f-hxwxd\" (UID: \"634059df-4860-4872-9aae-3b71aa2d55b2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hxwxd" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200640 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlbqt\" (UniqueName: \"kubernetes.io/projected/4186f5d8-e330-4fae-943e-a6abbdb49b96-kube-api-access-mlbqt\") pod \"apiserver-7bbb656c7d-4tf2g\" (UID: \"4186f5d8-e330-4fae-943e-a6abbdb49b96\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200660 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/db54cce9-ff9d-4772-abf3-01f15ecb8075-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-w48jn\" (UID: \"db54cce9-ff9d-4772-abf3-01f15ecb8075\") " pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200678 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssfzm\" (UniqueName: \"kubernetes.io/projected/db54cce9-ff9d-4772-abf3-01f15ecb8075-kube-api-access-ssfzm\") pod \"controller-manager-879f6c89f-w48jn\" (UID: \"db54cce9-ff9d-4772-abf3-01f15ecb8075\") " pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200694 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/db54cce9-ff9d-4772-abf3-01f15ecb8075-serving-cert\") pod \"controller-manager-879f6c89f-w48jn\" (UID: \"db54cce9-ff9d-4772-abf3-01f15ecb8075\") " pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200725 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/634059df-4860-4872-9aae-3b71aa2d55b2-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-hxwxd\" (UID: \"634059df-4860-4872-9aae-3b71aa2d55b2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hxwxd" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200747 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4186f5d8-e330-4fae-943e-a6abbdb49b96-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-4tf2g\" (UID: \"4186f5d8-e330-4fae-943e-a6abbdb49b96\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200766 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: 
\"kubernetes.io/configmap/4186f5d8-e330-4fae-943e-a6abbdb49b96-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-4tf2g\" (UID: \"4186f5d8-e330-4fae-943e-a6abbdb49b96\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200778 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200784 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/db54cce9-ff9d-4772-abf3-01f15ecb8075-client-ca\") pod \"controller-manager-879f6c89f-w48jn\" (UID: \"db54cce9-ff9d-4772-abf3-01f15ecb8075\") " pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200803 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b49e2d2f-5155-49bf-82f3-b68992ebe787-images\") pod \"machine-api-operator-5694c8668f-v475b\" (UID: \"b49e2d2f-5155-49bf-82f3-b68992ebe787\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-v475b" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200820 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa-machine-approver-tls\") pod \"machine-approver-56656f9798-vh59q\" (UID: \"5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh59q" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200840 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d74d39da-6fcf-437c-805b-ec416d09e348-service-ca-bundle\") pod \"authentication-operator-69f744f599-t2zxd\" (UID: \"d74d39da-6fcf-437c-805b-ec416d09e348\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t2zxd" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200858 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa-config\") pod \"machine-approver-56656f9798-vh59q\" (UID: \"5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh59q" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.200939 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.201272 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.206350 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.206676 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.206983 4838 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.207021 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.207649 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.207782 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.209659 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-t2zxd"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.212795 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-l9qxf"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.213946 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5nmhg"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.224274 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h2pj9"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.225743 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-4746f"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.226283 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8b9cc"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.224949 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5nmhg" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.228351 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.225815 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h2pj9" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.214950 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.214093 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-l9qxf" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.226844 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4746f" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.228190 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-2qsm5"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.228920 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-27vx7"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.215015 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.215125 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.215543 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.215710 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.224488 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.224960 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.227405 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.227755 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.230727 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.231456 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.231112 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-27vx7" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.231039 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-s7xdh"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.232398 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dhhtj"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.232900 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.233934 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dhhtj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.249923 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.250175 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-fzjz7"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.251083 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-p64qh"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.252261 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-p64qh" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.252369 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdxgd"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.252683 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-fzjz7" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.257240 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.258837 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-cqh2x"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.259901 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdxgd" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.266167 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-s84zq"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.266269 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-cqh2x" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.267290 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-c2fjv"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.267858 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.268216 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-s84zq" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.268475 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9ssxs"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.268608 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-c2fjv" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.269143 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9ssxs" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.270192 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gxndj"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.270679 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gxndj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.271364 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7bcl9"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.272507 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-5fhpx"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.272955 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7bcl9" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.273337 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5fhpx" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.273341 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-fcmcn"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.274372 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-nfzt8"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.274734 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fcmcn" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.275219 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500485-q5vqj"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.275754 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500485-q5vqj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.275958 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-nfzt8" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.276226 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vkdls"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.277436 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vkdls" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.277596 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-s96vn"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.278039 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-s96vn" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.278797 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zdtxg"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.279430 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zdtxg" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.279989 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-2l7rj"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.280894 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-mzp9s"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.282794 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-dtdwq"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.284193 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-dmwvh"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.285094 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-j52s8"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.286735 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-zzq5h"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.287445 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.287844 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-zzq5h" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.288084 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-zfr2j"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.289064 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dhhtj"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.290071 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8b9cc"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.291384 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h2pj9"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.292671 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-4746f"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.293677 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-l9qxf"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.294589 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdxgd"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.295876 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500485-q5vqj"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.296997 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-s84zq"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.297793 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gxndj"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.298891 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-c2fjv"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.299917 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-nfzt8"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.300909 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-27vx7"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301279 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b49e2d2f-5155-49bf-82f3-b68992ebe787-images\") pod \"machine-api-operator-5694c8668f-v475b\" (UID: \"b49e2d2f-5155-49bf-82f3-b68992ebe787\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-v475b" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301306 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa-machine-approver-tls\") pod \"machine-approver-56656f9798-vh59q\" (UID: \"5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh59q" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301330 4838 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d74d39da-6fcf-437c-805b-ec416d09e348-service-ca-bundle\") pod \"authentication-operator-69f744f599-t2zxd\" (UID: \"d74d39da-6fcf-437c-805b-ec416d09e348\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t2zxd" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301352 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/eb0d3aa3-09b5-4b68-833d-03218e1794f0-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-5nmhg\" (UID: \"eb0d3aa3-09b5-4b68-833d-03218e1794f0\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5nmhg" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301370 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa-config\") pod \"machine-approver-56656f9798-vh59q\" (UID: \"5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh59q" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301387 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6p62\" (UniqueName: \"kubernetes.io/projected/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-kube-api-access-r6p62\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301402 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b49e2d2f-5155-49bf-82f3-b68992ebe787-config\") pod \"machine-api-operator-5694c8668f-v475b\" (UID: \"b49e2d2f-5155-49bf-82f3-b68992ebe787\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-v475b" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301420 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxgwz\" (UniqueName: \"kubernetes.io/projected/634059df-4860-4872-9aae-3b71aa2d55b2-kube-api-access-xxgwz\") pod \"openshift-apiserver-operator-796bbdcf4f-hxwxd\" (UID: \"634059df-4860-4872-9aae-3b71aa2d55b2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hxwxd" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301436 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301453 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4186f5d8-e330-4fae-943e-a6abbdb49b96-encryption-config\") pod \"apiserver-7bbb656c7d-4tf2g\" (UID: \"4186f5d8-e330-4fae-943e-a6abbdb49b96\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301471 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"kube-api-access-fss4g\" (UniqueName: \"kubernetes.io/projected/92924f81-e588-47e1-84d1-766c9774f6d1-kube-api-access-fss4g\") pod \"route-controller-manager-6576b87f9c-z6khj\" (UID: \"92924f81-e588-47e1-84d1-766c9774f6d1\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301488 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301512 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4186f5d8-e330-4fae-943e-a6abbdb49b96-audit-dir\") pod \"apiserver-7bbb656c7d-4tf2g\" (UID: \"4186f5d8-e330-4fae-943e-a6abbdb49b96\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301527 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301543 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301559 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa-auth-proxy-config\") pod \"machine-approver-56656f9798-vh59q\" (UID: \"5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh59q" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301574 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/228a6edf-f91c-4f0f-8098-98831284e76c-trusted-ca\") pod \"console-operator-58897d9998-dtdwq\" (UID: \"228a6edf-f91c-4f0f-8098-98831284e76c\") " pod="openshift-console-operator/console-operator-58897d9998-dtdwq" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301589 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6e542c14-1264-46b8-92ab-bc74484549bf-proxy-tls\") pod \"machine-config-controller-84d6567774-4746f\" (UID: \"6e542c14-1264-46b8-92ab-bc74484549bf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4746f" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301604 4838 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/228a6edf-f91c-4f0f-8098-98831284e76c-serving-cert\") pod \"console-operator-58897d9998-dtdwq\" (UID: \"228a6edf-f91c-4f0f-8098-98831284e76c\") " pod="openshift-console-operator/console-operator-58897d9998-dtdwq" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301649 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03d2fa95-b476-4259-8d2d-69bd31c28da4-serving-cert\") pod \"openshift-config-operator-7777fb866f-dmwvh\" (UID: \"03d2fa95-b476-4259-8d2d-69bd31c28da4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dmwvh" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301688 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301704 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6e542c14-1264-46b8-92ab-bc74484549bf-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-4746f\" (UID: \"6e542c14-1264-46b8-92ab-bc74484549bf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4746f" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301730 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db54cce9-ff9d-4772-abf3-01f15ecb8075-config\") pod \"controller-manager-879f6c89f-w48jn\" (UID: \"db54cce9-ff9d-4772-abf3-01f15ecb8075\") " pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301746 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-audit-dir\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301761 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301778 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wf255\" (UniqueName: \"kubernetes.io/projected/03d2fa95-b476-4259-8d2d-69bd31c28da4-kube-api-access-wf255\") pod \"openshift-config-operator-7777fb866f-dmwvh\" (UID: \"03d2fa95-b476-4259-8d2d-69bd31c28da4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dmwvh" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301799 4838 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/bc3ee51b-3741-4106-9785-3d2b572ee205-metrics-tls\") pod \"dns-operator-744455d44c-l9qxf\" (UID: \"bc3ee51b-3741-4106-9785-3d2b572ee205\") " pod="openshift-dns-operator/dns-operator-744455d44c-l9qxf" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301814 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4186f5d8-e330-4fae-943e-a6abbdb49b96-audit-policies\") pod \"apiserver-7bbb656c7d-4tf2g\" (UID: \"4186f5d8-e330-4fae-943e-a6abbdb49b96\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301830 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbdvv\" (UniqueName: \"kubernetes.io/projected/d74d39da-6fcf-437c-805b-ec416d09e348-kube-api-access-mbdvv\") pod \"authentication-operator-69f744f599-t2zxd\" (UID: \"d74d39da-6fcf-437c-805b-ec416d09e348\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t2zxd" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301844 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/03d2fa95-b476-4259-8d2d-69bd31c28da4-available-featuregates\") pod \"openshift-config-operator-7777fb866f-dmwvh\" (UID: \"03d2fa95-b476-4259-8d2d-69bd31c28da4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dmwvh" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301858 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bsgc9\" (UniqueName: \"kubernetes.io/projected/b49e2d2f-5155-49bf-82f3-b68992ebe787-kube-api-access-bsgc9\") pod \"machine-api-operator-5694c8668f-v475b\" (UID: \"b49e2d2f-5155-49bf-82f3-b68992ebe787\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-v475b" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301881 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rd6d2\" (UniqueName: \"kubernetes.io/projected/5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa-kube-api-access-rd6d2\") pod \"machine-approver-56656f9798-vh59q\" (UID: \"5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh59q" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301899 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/b49e2d2f-5155-49bf-82f3-b68992ebe787-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-v475b\" (UID: \"b49e2d2f-5155-49bf-82f3-b68992ebe787\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-v475b" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301915 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/37fe135e-daf4-4d19-9ca6-a33ca7174222-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-j52s8\" (UID: \"37fe135e-daf4-4d19-9ca6-a33ca7174222\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-j52s8" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301931 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/4186f5d8-e330-4fae-943e-a6abbdb49b96-serving-cert\") pod \"apiserver-7bbb656c7d-4tf2g\" (UID: \"4186f5d8-e330-4fae-943e-a6abbdb49b96\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301948 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d74d39da-6fcf-437c-805b-ec416d09e348-config\") pod \"authentication-operator-69f744f599-t2zxd\" (UID: \"d74d39da-6fcf-437c-805b-ec416d09e348\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t2zxd" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301963 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d74d39da-6fcf-437c-805b-ec416d09e348-serving-cert\") pod \"authentication-operator-69f744f599-t2zxd\" (UID: \"d74d39da-6fcf-437c-805b-ec416d09e348\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t2zxd" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.301983 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4186f5d8-e330-4fae-943e-a6abbdb49b96-etcd-client\") pod \"apiserver-7bbb656c7d-4tf2g\" (UID: \"4186f5d8-e330-4fae-943e-a6abbdb49b96\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302001 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvb6z\" (UniqueName: \"kubernetes.io/projected/228a6edf-f91c-4f0f-8098-98831284e76c-kube-api-access-qvb6z\") pod \"console-operator-58897d9998-dtdwq\" (UID: \"228a6edf-f91c-4f0f-8098-98831284e76c\") " pod="openshift-console-operator/console-operator-58897d9998-dtdwq" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302005 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302017 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmljg\" (UniqueName: \"kubernetes.io/projected/37fe135e-daf4-4d19-9ca6-a33ca7174222-kube-api-access-lmljg\") pod \"openshift-controller-manager-operator-756b6f6bc6-j52s8\" (UID: \"37fe135e-daf4-4d19-9ca6-a33ca7174222\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-j52s8" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302076 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jz6m\" (UniqueName: \"kubernetes.io/projected/bc3ee51b-3741-4106-9785-3d2b572ee205-kube-api-access-4jz6m\") pod \"dns-operator-744455d44c-l9qxf\" (UID: \"bc3ee51b-3741-4106-9785-3d2b572ee205\") " pod="openshift-dns-operator/dns-operator-744455d44c-l9qxf" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302102 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d74d39da-6fcf-437c-805b-ec416d09e348-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-t2zxd\" (UID: \"d74d39da-6fcf-437c-805b-ec416d09e348\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t2zxd" Feb 02 10:55:46 crc 
kubenswrapper[4838]: I0202 10:55:46.302126 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/634059df-4860-4872-9aae-3b71aa2d55b2-config\") pod \"openshift-apiserver-operator-796bbdcf4f-hxwxd\" (UID: \"634059df-4860-4872-9aae-3b71aa2d55b2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hxwxd" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302150 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkfp8\" (UniqueName: \"kubernetes.io/projected/eb0d3aa3-09b5-4b68-833d-03218e1794f0-kube-api-access-vkfp8\") pod \"control-plane-machine-set-operator-78cbb6b69f-5nmhg\" (UID: \"eb0d3aa3-09b5-4b68-833d-03218e1794f0\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5nmhg" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302166 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa-config\") pod \"machine-approver-56656f9798-vh59q\" (UID: \"5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh59q" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302182 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/92924f81-e588-47e1-84d1-766c9774f6d1-serving-cert\") pod \"route-controller-manager-6576b87f9c-z6khj\" (UID: \"92924f81-e588-47e1-84d1-766c9774f6d1\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302198 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-audit-policies\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302214 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37fe135e-daf4-4d19-9ca6-a33ca7174222-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-j52s8\" (UID: \"37fe135e-daf4-4d19-9ca6-a33ca7174222\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-j52s8" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302233 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/228a6edf-f91c-4f0f-8098-98831284e76c-config\") pod \"console-operator-58897d9998-dtdwq\" (UID: \"228a6edf-f91c-4f0f-8098-98831284e76c\") " pod="openshift-console-operator/console-operator-58897d9998-dtdwq" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302250 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92924f81-e588-47e1-84d1-766c9774f6d1-config\") pod \"route-controller-manager-6576b87f9c-z6khj\" (UID: \"92924f81-e588-47e1-84d1-766c9774f6d1\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 
10:55:46.302283 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlbqt\" (UniqueName: \"kubernetes.io/projected/4186f5d8-e330-4fae-943e-a6abbdb49b96-kube-api-access-mlbqt\") pod \"apiserver-7bbb656c7d-4tf2g\" (UID: \"4186f5d8-e330-4fae-943e-a6abbdb49b96\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302303 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/db54cce9-ff9d-4772-abf3-01f15ecb8075-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-w48jn\" (UID: \"db54cce9-ff9d-4772-abf3-01f15ecb8075\") " pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302322 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302344 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssfzm\" (UniqueName: \"kubernetes.io/projected/db54cce9-ff9d-4772-abf3-01f15ecb8075-kube-api-access-ssfzm\") pod \"controller-manager-879f6c89f-w48jn\" (UID: \"db54cce9-ff9d-4772-abf3-01f15ecb8075\") " pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302352 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b49e2d2f-5155-49bf-82f3-b68992ebe787-images\") pod \"machine-api-operator-5694c8668f-v475b\" (UID: \"b49e2d2f-5155-49bf-82f3-b68992ebe787\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-v475b" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302360 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302406 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/92924f81-e588-47e1-84d1-766c9774f6d1-client-ca\") pod \"route-controller-manager-6576b87f9c-z6khj\" (UID: \"92924f81-e588-47e1-84d1-766c9774f6d1\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302434 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/db54cce9-ff9d-4772-abf3-01f15ecb8075-serving-cert\") pod \"controller-manager-879f6c89f-w48jn\" (UID: \"db54cce9-ff9d-4772-abf3-01f15ecb8075\") " pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302453 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/634059df-4860-4872-9aae-3b71aa2d55b2-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-hxwxd\" (UID: \"634059df-4860-4872-9aae-3b71aa2d55b2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hxwxd" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302470 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302488 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302507 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6d8cz\" (UniqueName: \"kubernetes.io/projected/6e542c14-1264-46b8-92ab-bc74484549bf-kube-api-access-6d8cz\") pod \"machine-config-controller-84d6567774-4746f\" (UID: \"6e542c14-1264-46b8-92ab-bc74484549bf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4746f" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302529 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4186f5d8-e330-4fae-943e-a6abbdb49b96-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-4tf2g\" (UID: \"4186f5d8-e330-4fae-943e-a6abbdb49b96\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302549 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302568 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4186f5d8-e330-4fae-943e-a6abbdb49b96-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-4tf2g\" (UID: \"4186f5d8-e330-4fae-943e-a6abbdb49b96\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302584 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/db54cce9-ff9d-4772-abf3-01f15ecb8075-client-ca\") pod \"controller-manager-879f6c89f-w48jn\" (UID: \"db54cce9-ff9d-4772-abf3-01f15ecb8075\") " pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302791 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/b49e2d2f-5155-49bf-82f3-b68992ebe787-config\") pod \"machine-api-operator-5694c8668f-v475b\" (UID: \"b49e2d2f-5155-49bf-82f3-b68992ebe787\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-v475b" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.302911 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4186f5d8-e330-4fae-943e-a6abbdb49b96-audit-dir\") pod \"apiserver-7bbb656c7d-4tf2g\" (UID: \"4186f5d8-e330-4fae-943e-a6abbdb49b96\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.303341 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa-auth-proxy-config\") pod \"machine-approver-56656f9798-vh59q\" (UID: \"5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh59q" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.303384 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5nmhg"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.304229 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/db54cce9-ff9d-4772-abf3-01f15ecb8075-client-ca\") pod \"controller-manager-879f6c89f-w48jn\" (UID: \"db54cce9-ff9d-4772-abf3-01f15ecb8075\") " pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.304652 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4186f5d8-e330-4fae-943e-a6abbdb49b96-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-4tf2g\" (UID: \"4186f5d8-e330-4fae-943e-a6abbdb49b96\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.305044 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4186f5d8-e330-4fae-943e-a6abbdb49b96-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-4tf2g\" (UID: \"4186f5d8-e330-4fae-943e-a6abbdb49b96\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.305071 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4186f5d8-e330-4fae-943e-a6abbdb49b96-audit-policies\") pod \"apiserver-7bbb656c7d-4tf2g\" (UID: \"4186f5d8-e330-4fae-943e-a6abbdb49b96\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.305447 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/634059df-4860-4872-9aae-3b71aa2d55b2-config\") pod \"openshift-apiserver-operator-796bbdcf4f-hxwxd\" (UID: \"634059df-4860-4872-9aae-3b71aa2d55b2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hxwxd" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.306143 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d74d39da-6fcf-437c-805b-ec416d09e348-service-ca-bundle\") pod \"authentication-operator-69f744f599-t2zxd\" 
(UID: \"d74d39da-6fcf-437c-805b-ec416d09e348\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t2zxd" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.307176 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/db54cce9-ff9d-4772-abf3-01f15ecb8075-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-w48jn\" (UID: \"db54cce9-ff9d-4772-abf3-01f15ecb8075\") " pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.307343 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-fzjz7"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.307751 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03d2fa95-b476-4259-8d2d-69bd31c28da4-serving-cert\") pod \"openshift-config-operator-7777fb866f-dmwvh\" (UID: \"03d2fa95-b476-4259-8d2d-69bd31c28da4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dmwvh" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.307758 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/db54cce9-ff9d-4772-abf3-01f15ecb8075-serving-cert\") pod \"controller-manager-879f6c89f-w48jn\" (UID: \"db54cce9-ff9d-4772-abf3-01f15ecb8075\") " pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.308029 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/03d2fa95-b476-4259-8d2d-69bd31c28da4-available-featuregates\") pod \"openshift-config-operator-7777fb866f-dmwvh\" (UID: \"03d2fa95-b476-4259-8d2d-69bd31c28da4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dmwvh" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.308281 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d74d39da-6fcf-437c-805b-ec416d09e348-config\") pod \"authentication-operator-69f744f599-t2zxd\" (UID: \"d74d39da-6fcf-437c-805b-ec416d09e348\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t2zxd" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.308294 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-p64qh"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.308651 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db54cce9-ff9d-4772-abf3-01f15ecb8075-config\") pod \"controller-manager-879f6c89f-w48jn\" (UID: \"db54cce9-ff9d-4772-abf3-01f15ecb8075\") " pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.308957 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d74d39da-6fcf-437c-805b-ec416d09e348-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-t2zxd\" (UID: \"d74d39da-6fcf-437c-805b-ec416d09e348\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t2zxd" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.309511 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9ssxs"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.309528 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4186f5d8-e330-4fae-943e-a6abbdb49b96-encryption-config\") pod \"apiserver-7bbb656c7d-4tf2g\" (UID: \"4186f5d8-e330-4fae-943e-a6abbdb49b96\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.310125 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/634059df-4860-4872-9aae-3b71aa2d55b2-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-hxwxd\" (UID: \"634059df-4860-4872-9aae-3b71aa2d55b2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hxwxd" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.310960 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa-machine-approver-tls\") pod \"machine-approver-56656f9798-vh59q\" (UID: \"5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh59q" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.311285 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-5fhpx"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.313082 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/b49e2d2f-5155-49bf-82f3-b68992ebe787-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-v475b\" (UID: \"b49e2d2f-5155-49bf-82f3-b68992ebe787\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-v475b" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.313080 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4186f5d8-e330-4fae-943e-a6abbdb49b96-serving-cert\") pod \"apiserver-7bbb656c7d-4tf2g\" (UID: \"4186f5d8-e330-4fae-943e-a6abbdb49b96\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.313376 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4186f5d8-e330-4fae-943e-a6abbdb49b96-etcd-client\") pod \"apiserver-7bbb656c7d-4tf2g\" (UID: \"4186f5d8-e330-4fae-943e-a6abbdb49b96\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.313965 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vkdls"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.315858 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.319782 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7bcl9"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.322165 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d74d39da-6fcf-437c-805b-ec416d09e348-serving-cert\") pod 
\"authentication-operator-69f744f599-t2zxd\" (UID: \"d74d39da-6fcf-437c-805b-ec416d09e348\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t2zxd" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.327674 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-fcmcn"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.330668 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-zzq5h"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.331597 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.333804 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-s7pcn"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.334515 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-s7pcn" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.334954 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zdtxg"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.335988 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-s96vn"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.337712 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-5pw6k"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.338303 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-5pw6k" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.339486 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-sz4zx"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.340018 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-sz4zx" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.340890 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-5pw6k"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.341706 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-sz4zx"] Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.346854 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.367529 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.387081 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.403229 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkfp8\" (UniqueName: \"kubernetes.io/projected/eb0d3aa3-09b5-4b68-833d-03218e1794f0-kube-api-access-vkfp8\") pod \"control-plane-machine-set-operator-78cbb6b69f-5nmhg\" (UID: \"eb0d3aa3-09b5-4b68-833d-03218e1794f0\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5nmhg" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.403348 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/92924f81-e588-47e1-84d1-766c9774f6d1-serving-cert\") pod \"route-controller-manager-6576b87f9c-z6khj\" (UID: \"92924f81-e588-47e1-84d1-766c9774f6d1\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.403440 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-audit-policies\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.403522 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37fe135e-daf4-4d19-9ca6-a33ca7174222-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-j52s8\" (UID: \"37fe135e-daf4-4d19-9ca6-a33ca7174222\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-j52s8" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.403605 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/228a6edf-f91c-4f0f-8098-98831284e76c-config\") pod \"console-operator-58897d9998-dtdwq\" (UID: \"228a6edf-f91c-4f0f-8098-98831284e76c\") " pod="openshift-console-operator/console-operator-58897d9998-dtdwq" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.403703 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92924f81-e588-47e1-84d1-766c9774f6d1-config\") pod \"route-controller-manager-6576b87f9c-z6khj\" (UID: \"92924f81-e588-47e1-84d1-766c9774f6d1\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" Feb 
02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.403802 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.403901 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.403973 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/92924f81-e588-47e1-84d1-766c9774f6d1-client-ca\") pod \"route-controller-manager-6576b87f9c-z6khj\" (UID: \"92924f81-e588-47e1-84d1-766c9774f6d1\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.404041 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.404123 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.404195 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6d8cz\" (UniqueName: \"kubernetes.io/projected/6e542c14-1264-46b8-92ab-bc74484549bf-kube-api-access-6d8cz\") pod \"machine-config-controller-84d6567774-4746f\" (UID: \"6e542c14-1264-46b8-92ab-bc74484549bf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4746f" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.404270 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.404344 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/eb0d3aa3-09b5-4b68-833d-03218e1794f0-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-5nmhg\" (UID: \"eb0d3aa3-09b5-4b68-833d-03218e1794f0\") " 
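
The mount records here come in pairs: reconciler_common.go:218 ("operationExecutor.MountVolume started") is kubelet's volume manager noticing that the desired state for a pod includes a volume absent from the actual state, and operation_generator.go:637 ("MountVolume.SetUp succeeded") reports the completed mount; the UniqueName encodes the plugin and the pod-UID-scoped volume (e.g. kubernetes.io/configmap/<pod-uid>-<volume>). A hypothetical helper (not part of kubelet) that pairs the two record types by UniqueName, leaving behind any volume whose mount never completed:

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
	"strings"
)

func main() {
	// Matches the escaped UniqueName field as it appears in this capture,
	// e.g. (UniqueName: \"kubernetes.io/configmap/<uid>-config\").
	re := regexp.MustCompile(`UniqueName: \\"([^\\"]+)\\"`)
	pending := map[string]bool{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 1<<20), 1<<20) // records in this log are very long
	for sc.Scan() {
		line := sc.Text()
		m := re.FindStringSubmatch(line)
		if m == nil {
			continue
		}
		switch {
		case strings.Contains(line, "operationExecutor.MountVolume started"):
			pending[m[1]] = true
		case strings.Contains(line, "MountVolume.SetUp succeeded"):
			delete(pending, m[1])
		}
	}
	for vol := range pending {
		fmt.Println("mount started but no SetUp success seen:", vol)
	}
}
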
pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5nmhg" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.404427 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6p62\" (UniqueName: \"kubernetes.io/projected/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-kube-api-access-r6p62\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.404525 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.404603 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fss4g\" (UniqueName: \"kubernetes.io/projected/92924f81-e588-47e1-84d1-766c9774f6d1-kube-api-access-fss4g\") pod \"route-controller-manager-6576b87f9c-z6khj\" (UID: \"92924f81-e588-47e1-84d1-766c9774f6d1\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.404709 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.404788 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.404795 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.404852 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-audit-policies\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.404867 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.404894 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/228a6edf-f91c-4f0f-8098-98831284e76c-trusted-ca\") pod \"console-operator-58897d9998-dtdwq\" (UID: \"228a6edf-f91c-4f0f-8098-98831284e76c\") " pod="openshift-console-operator/console-operator-58897d9998-dtdwq" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.404913 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.404919 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6e542c14-1264-46b8-92ab-bc74484549bf-proxy-tls\") pod \"machine-config-controller-84d6567774-4746f\" (UID: \"6e542c14-1264-46b8-92ab-bc74484549bf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4746f" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.404977 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/228a6edf-f91c-4f0f-8098-98831284e76c-serving-cert\") pod \"console-operator-58897d9998-dtdwq\" (UID: \"228a6edf-f91c-4f0f-8098-98831284e76c\") " pod="openshift-console-operator/console-operator-58897d9998-dtdwq" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.405004 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.405024 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6e542c14-1264-46b8-92ab-bc74484549bf-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-4746f\" (UID: \"6e542c14-1264-46b8-92ab-bc74484549bf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4746f" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.405045 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-audit-dir\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.405061 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.405094 4838 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/bc3ee51b-3741-4106-9785-3d2b572ee205-metrics-tls\") pod \"dns-operator-744455d44c-l9qxf\" (UID: \"bc3ee51b-3741-4106-9785-3d2b572ee205\") " pod="openshift-dns-operator/dns-operator-744455d44c-l9qxf" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.405144 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/37fe135e-daf4-4d19-9ca6-a33ca7174222-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-j52s8\" (UID: \"37fe135e-daf4-4d19-9ca6-a33ca7174222\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-j52s8" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.405174 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvb6z\" (UniqueName: \"kubernetes.io/projected/228a6edf-f91c-4f0f-8098-98831284e76c-kube-api-access-qvb6z\") pod \"console-operator-58897d9998-dtdwq\" (UID: \"228a6edf-f91c-4f0f-8098-98831284e76c\") " pod="openshift-console-operator/console-operator-58897d9998-dtdwq" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.405196 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmljg\" (UniqueName: \"kubernetes.io/projected/37fe135e-daf4-4d19-9ca6-a33ca7174222-kube-api-access-lmljg\") pod \"openshift-controller-manager-operator-756b6f6bc6-j52s8\" (UID: \"37fe135e-daf4-4d19-9ca6-a33ca7174222\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-j52s8" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.405199 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/228a6edf-f91c-4f0f-8098-98831284e76c-config\") pod \"console-operator-58897d9998-dtdwq\" (UID: \"228a6edf-f91c-4f0f-8098-98831284e76c\") " pod="openshift-console-operator/console-operator-58897d9998-dtdwq" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.405220 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jz6m\" (UniqueName: \"kubernetes.io/projected/bc3ee51b-3741-4106-9785-3d2b572ee205-kube-api-access-4jz6m\") pod \"dns-operator-744455d44c-l9qxf\" (UID: \"bc3ee51b-3741-4106-9785-3d2b572ee205\") " pod="openshift-dns-operator/dns-operator-744455d44c-l9qxf" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.405638 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/92924f81-e588-47e1-84d1-766c9774f6d1-client-ca\") pod \"route-controller-manager-6576b87f9c-z6khj\" (UID: \"92924f81-e588-47e1-84d1-766c9774f6d1\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.404463 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.406027 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/92924f81-e588-47e1-84d1-766c9774f6d1-config\") pod \"route-controller-manager-6576b87f9c-z6khj\" (UID: \"92924f81-e588-47e1-84d1-766c9774f6d1\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.404158 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37fe135e-daf4-4d19-9ca6-a33ca7174222-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-j52s8\" (UID: \"37fe135e-daf4-4d19-9ca6-a33ca7174222\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-j52s8" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.406177 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/92924f81-e588-47e1-84d1-766c9774f6d1-serving-cert\") pod \"route-controller-manager-6576b87f9c-z6khj\" (UID: \"92924f81-e588-47e1-84d1-766c9774f6d1\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.406409 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-audit-dir\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.407116 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6e542c14-1264-46b8-92ab-bc74484549bf-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-4746f\" (UID: \"6e542c14-1264-46b8-92ab-bc74484549bf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4746f" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.407260 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/228a6edf-f91c-4f0f-8098-98831284e76c-trusted-ca\") pod \"console-operator-58897d9998-dtdwq\" (UID: \"228a6edf-f91c-4f0f-8098-98831284e76c\") " pod="openshift-console-operator/console-operator-58897d9998-dtdwq" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.407964 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6e542c14-1264-46b8-92ab-bc74484549bf-proxy-tls\") pod \"machine-config-controller-84d6567774-4746f\" (UID: \"6e542c14-1264-46b8-92ab-bc74484549bf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4746f" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.408149 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.408356 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/228a6edf-f91c-4f0f-8098-98831284e76c-serving-cert\") pod \"console-operator-58897d9998-dtdwq\" (UID: \"228a6edf-f91c-4f0f-8098-98831284e76c\") " pod="openshift-console-operator/console-operator-58897d9998-dtdwq" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.409078 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" 
(UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.410141 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.410187 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.410700 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.410885 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.410999 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.411232 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/37fe135e-daf4-4d19-9ca6-a33ca7174222-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-j52s8\" (UID: \"37fe135e-daf4-4d19-9ca6-a33ca7174222\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-j52s8" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.411322 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/eb0d3aa3-09b5-4b68-833d-03218e1794f0-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-5nmhg\" (UID: \"eb0d3aa3-09b5-4b68-833d-03218e1794f0\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5nmhg" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.412637 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.413880 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.427096 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.447679 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.467613 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.479445 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/bc3ee51b-3741-4106-9785-3d2b572ee205-metrics-tls\") pod \"dns-operator-744455d44c-l9qxf\" (UID: \"bc3ee51b-3741-4106-9785-3d2b572ee205\") " pod="openshift-dns-operator/dns-operator-744455d44c-l9qxf" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.487798 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.508327 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.527212 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.568007 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.587070 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.628696 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.648146 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.669090 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.688081 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.708740 4838 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.727887 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.748328 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.768757 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.788197 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.808850 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.834470 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.848764 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.867943 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.888492 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.909300 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.928002 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.948903 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.967918 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Feb 02 10:55:46 crc kubenswrapper[4838]: I0202 10:55:46.988677 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.007855 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.027318 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.048415 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.067812 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.088042 4838 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.108127 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.138874 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.148644 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.168000 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.187742 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.209157 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.234792 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.248436 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.267776 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.285898 4838 request.go:700] Waited for 1.016844444s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-etcd-operator/secrets?fieldSelector=metadata.name%3Detcd-client&limit=500&resourceVersion=0 Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.287285 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.307069 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.327251 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.347903 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.367542 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.387745 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.408613 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.428460 4838 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.448297 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.468117 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.488990 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.508409 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.527558 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.547398 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.567408 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.587481 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.607820 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.628249 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.648048 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.668256 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.688850 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.708390 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.728181 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.748980 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.768481 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.787975 4838 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.808081 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.827957 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.848808 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.868198 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.888643 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.923949 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.928817 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.948186 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.968566 4838 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Feb 02 10:55:47 crc kubenswrapper[4838]: I0202 10:55:47.987795 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.035837 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxgwz\" (UniqueName: \"kubernetes.io/projected/634059df-4860-4872-9aae-3b71aa2d55b2-kube-api-access-xxgwz\") pod \"openshift-apiserver-operator-796bbdcf4f-hxwxd\" (UID: \"634059df-4860-4872-9aae-3b71aa2d55b2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hxwxd" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.036227 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hxwxd" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.052981 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rd6d2\" (UniqueName: \"kubernetes.io/projected/5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa-kube-api-access-rd6d2\") pod \"machine-approver-56656f9798-vh59q\" (UID: \"5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh59q" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.074298 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.074319 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wf255\" (UniqueName: \"kubernetes.io/projected/03d2fa95-b476-4259-8d2d-69bd31c28da4-kube-api-access-wf255\") pod \"openshift-config-operator-7777fb866f-dmwvh\" (UID: \"03d2fa95-b476-4259-8d2d-69bd31c28da4\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dmwvh" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.102674 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlbqt\" (UniqueName: \"kubernetes.io/projected/4186f5d8-e330-4fae-943e-a6abbdb49b96-kube-api-access-mlbqt\") pod \"apiserver-7bbb656c7d-4tf2g\" (UID: \"4186f5d8-e330-4fae-943e-a6abbdb49b96\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.131850 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bsgc9\" (UniqueName: \"kubernetes.io/projected/b49e2d2f-5155-49bf-82f3-b68992ebe787-kube-api-access-bsgc9\") pod \"machine-api-operator-5694c8668f-v475b\" (UID: \"b49e2d2f-5155-49bf-82f3-b68992ebe787\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-v475b" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.136403 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssfzm\" (UniqueName: \"kubernetes.io/projected/db54cce9-ff9d-4772-abf3-01f15ecb8075-kube-api-access-ssfzm\") pod \"controller-manager-879f6c89f-w48jn\" (UID: \"db54cce9-ff9d-4772-abf3-01f15ecb8075\") " pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.142956 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbdvv\" (UniqueName: \"kubernetes.io/projected/d74d39da-6fcf-437c-805b-ec416d09e348-kube-api-access-mbdvv\") pod \"authentication-operator-69f744f599-t2zxd\" (UID: \"d74d39da-6fcf-437c-805b-ec416d09e348\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t2zxd" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.150475 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.167440 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.190379 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.208585 4838 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.219474 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-v475b" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.229202 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.238014 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh59q" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.248154 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.256961 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dmwvh" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.267875 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.286663 4838 request.go:700] Waited for 1.946465641s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-dns/secrets?fieldSelector=metadata.name%3Ddns-dockercfg-jwfmh&limit=500&resourceVersion=0 Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.287842 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-t2zxd" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.288163 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.298686 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hxwxd"] Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.307694 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.309710 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh59q" event={"ID":"5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa","Type":"ContainerStarted","Data":"5e7cd18daaccf4817be6faec89be677b14bb6ab00d1a1b5d82bff3b34428014b"} Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.323509 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.328077 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.365601 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.367447 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkfp8\" (UniqueName: \"kubernetes.io/projected/eb0d3aa3-09b5-4b68-833d-03218e1794f0-kube-api-access-vkfp8\") pod \"control-plane-machine-set-operator-78cbb6b69f-5nmhg\" (UID: \"eb0d3aa3-09b5-4b68-833d-03218e1794f0\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5nmhg" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.399331 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fss4g\" (UniqueName: \"kubernetes.io/projected/92924f81-e588-47e1-84d1-766c9774f6d1-kube-api-access-fss4g\") pod \"route-controller-manager-6576b87f9c-z6khj\" (UID: \"92924f81-e588-47e1-84d1-766c9774f6d1\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.403854 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jz6m\" (UniqueName: \"kubernetes.io/projected/bc3ee51b-3741-4106-9785-3d2b572ee205-kube-api-access-4jz6m\") pod \"dns-operator-744455d44c-l9qxf\" (UID: \"bc3ee51b-3741-4106-9785-3d2b572ee205\") " pod="openshift-dns-operator/dns-operator-744455d44c-l9qxf" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.428865 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6p62\" (UniqueName: \"kubernetes.io/projected/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-kube-api-access-r6p62\") pod \"oauth-openshift-558db77b4-2l7rj\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.431241 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-v475b"] Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.443791 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6d8cz\" (UniqueName: \"kubernetes.io/projected/6e542c14-1264-46b8-92ab-bc74484549bf-kube-api-access-6d8cz\") pod \"machine-config-controller-84d6567774-4746f\" (UID: \"6e542c14-1264-46b8-92ab-bc74484549bf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4746f" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.454395 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.458663 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5nmhg" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.464040 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvb6z\" (UniqueName: \"kubernetes.io/projected/228a6edf-f91c-4f0f-8098-98831284e76c-kube-api-access-qvb6z\") pod \"console-operator-58897d9998-dtdwq\" (UID: \"228a6edf-f91c-4f0f-8098-98831284e76c\") " pod="openshift-console-operator/console-operator-58897d9998-dtdwq" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.482655 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-l9qxf" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.489276 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4746f" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.490191 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmljg\" (UniqueName: \"kubernetes.io/projected/37fe135e-daf4-4d19-9ca6-a33ca7174222-kube-api-access-lmljg\") pod \"openshift-controller-manager-operator-756b6f6bc6-j52s8\" (UID: \"37fe135e-daf4-4d19-9ca6-a33ca7174222\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-j52s8" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.497359 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-dmwvh"] Feb 02 10:55:48 crc kubenswrapper[4838]: W0202 10:55:48.537067 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod03d2fa95_b476_4259_8d2d_69bd31c28da4.slice/crio-46272fc5b3b724602387508905dae55e95044b7b5ae2d91623ffe2b3bf4f63d5 WatchSource:0}: Error finding container 46272fc5b3b724602387508905dae55e95044b7b5ae2d91623ffe2b3bf4f63d5: Status 404 returned error can't find the container with id 46272fc5b3b724602387508905dae55e95044b7b5ae2d91623ffe2b3bf4f63d5 Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.546906 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d24e9722-41b4-4e18-9e40-204ac7e1a67e-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h2pj9\" (UID: \"d24e9722-41b4-4e18-9e40-204ac7e1a67e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h2pj9" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.546953 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b5aa31dc-369f-427e-97a4-b6245df5df4f-image-import-ca\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.546973 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b5aa31dc-369f-427e-97a4-b6245df5df4f-etcd-client\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.546993 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3e879c3f-8c95-449b-b9e7-439c78f48209-oauth-serving-cert\") pod \"console-f9d7485db-2qsm5\" (UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547009 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9d7w\" (UniqueName: \"kubernetes.io/projected/3e879c3f-8c95-449b-b9e7-439c78f48209-kube-api-access-v9d7w\") pod \"console-f9d7485db-2qsm5\" (UID: 
\"3e879c3f-8c95-449b-b9e7-439c78f48209\") " pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547033 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzzk9\" (UniqueName: \"kubernetes.io/projected/0adb948a-923d-44f9-8cad-f36fe04a90b2-kube-api-access-hzzk9\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547049 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/76adb7c0-2e4a-46a7-ac36-9d8eba267cab-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-27vx7\" (UID: \"76adb7c0-2e4a-46a7-ac36-9d8eba267cab\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-27vx7" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547063 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3e879c3f-8c95-449b-b9e7-439c78f48209-console-serving-cert\") pod \"console-f9d7485db-2qsm5\" (UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547076 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3e879c3f-8c95-449b-b9e7-439c78f48209-service-ca\") pod \"console-f9d7485db-2qsm5\" (UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547104 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b5aa31dc-369f-427e-97a4-b6245df5df4f-audit\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547119 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztrjl\" (UniqueName: \"kubernetes.io/projected/76adb7c0-2e4a-46a7-ac36-9d8eba267cab-kube-api-access-ztrjl\") pod \"cluster-image-registry-operator-dc59b4c8b-27vx7\" (UID: \"76adb7c0-2e4a-46a7-ac36-9d8eba267cab\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-27vx7" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547141 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547160 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0adb948a-923d-44f9-8cad-f36fe04a90b2-trusted-ca\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 
10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547176 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0adb948a-923d-44f9-8cad-f36fe04a90b2-registry-certificates\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547189 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b5aa31dc-369f-427e-97a4-b6245df5df4f-node-pullsecrets\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547205 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/76adb7c0-2e4a-46a7-ac36-9d8eba267cab-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-27vx7\" (UID: \"76adb7c0-2e4a-46a7-ac36-9d8eba267cab\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-27vx7" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547233 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d24e9722-41b4-4e18-9e40-204ac7e1a67e-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h2pj9\" (UID: \"d24e9722-41b4-4e18-9e40-204ac7e1a67e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h2pj9" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547247 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0adb948a-923d-44f9-8cad-f36fe04a90b2-installation-pull-secrets\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547262 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b5aa31dc-369f-427e-97a4-b6245df5df4f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547278 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0adb948a-923d-44f9-8cad-f36fe04a90b2-ca-trust-extracted\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547296 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b5aa31dc-369f-427e-97a4-b6245df5df4f-etcd-serving-ca\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 
10:55:48.547310 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b5aa31dc-369f-427e-97a4-b6245df5df4f-encryption-config\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547340 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3e879c3f-8c95-449b-b9e7-439c78f48209-trusted-ca-bundle\") pod \"console-f9d7485db-2qsm5\" (UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547361 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0adb948a-923d-44f9-8cad-f36fe04a90b2-bound-sa-token\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547386 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d24e9722-41b4-4e18-9e40-204ac7e1a67e-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h2pj9\" (UID: \"d24e9722-41b4-4e18-9e40-204ac7e1a67e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h2pj9" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547423 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3e879c3f-8c95-449b-b9e7-439c78f48209-console-config\") pod \"console-f9d7485db-2qsm5\" (UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547438 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrqp6\" (UniqueName: \"kubernetes.io/projected/ec1b1633-82ef-481f-baac-8bb589265b21-kube-api-access-rrqp6\") pod \"cluster-samples-operator-665b6dd947-s7xdh\" (UID: \"ec1b1633-82ef-481f-baac-8bb589265b21\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-s7xdh" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547453 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3e879c3f-8c95-449b-b9e7-439c78f48209-console-oauth-config\") pod \"console-f9d7485db-2qsm5\" (UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547474 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b5aa31dc-369f-427e-97a4-b6245df5df4f-serving-cert\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547488 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/76adb7c0-2e4a-46a7-ac36-9d8eba267cab-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-27vx7\" (UID: \"76adb7c0-2e4a-46a7-ac36-9d8eba267cab\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-27vx7" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547511 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0adb948a-923d-44f9-8cad-f36fe04a90b2-registry-tls\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547525 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b5aa31dc-369f-427e-97a4-b6245df5df4f-config\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547541 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/ec1b1633-82ef-481f-baac-8bb589265b21-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-s7xdh\" (UID: \"ec1b1633-82ef-481f-baac-8bb589265b21\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-s7xdh" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547558 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvw5l\" (UniqueName: \"kubernetes.io/projected/b5aa31dc-369f-427e-97a4-b6245df5df4f-kube-api-access-qvw5l\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547577 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtvqw\" (UniqueName: \"kubernetes.io/projected/ed8b29bf-e322-4cc9-b027-0aea680ce349-kube-api-access-jtvqw\") pod \"downloads-7954f5f757-mzp9s\" (UID: \"ed8b29bf-e322-4cc9-b027-0aea680ce349\") " pod="openshift-console/downloads-7954f5f757-mzp9s" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.547598 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b5aa31dc-369f-427e-97a4-b6245df5df4f-audit-dir\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: E0202 10:55:48.548983 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:49.048972231 +0000 UTC m=+143.386073259 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.648741 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:48 crc kubenswrapper[4838]: E0202 10:55:48.648901 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:49.148880259 +0000 UTC m=+143.485981287 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649153 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjdft\" (UniqueName: \"kubernetes.io/projected/64f909e9-b00b-427e-9fe8-3ebf290d7676-kube-api-access-sjdft\") pod \"kube-storage-version-migrator-operator-b67b599dd-dhhtj\" (UID: \"64f909e9-b00b-427e-9fe8-3ebf290d7676\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dhhtj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649176 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7c1d42f1-afc2-499f-9f51-aae97e6cc10a-profile-collector-cert\") pod \"olm-operator-6b444d44fb-vkdls\" (UID: \"7c1d42f1-afc2-499f-9f51-aae97e6cc10a\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vkdls" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649200 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3e879c3f-8c95-449b-b9e7-439c78f48209-trusted-ca-bundle\") pod \"console-f9d7485db-2qsm5\" (UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649217 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d24e9722-41b4-4e18-9e40-204ac7e1a67e-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h2pj9\" (UID: \"d24e9722-41b4-4e18-9e40-204ac7e1a67e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h2pj9" Feb 02 10:55:48 crc kubenswrapper[4838]: 
I0202 10:55:48.649386 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/97092ddc-d56d-49b0-b290-30f113972b43-signing-key\") pod \"service-ca-9c57cc56f-nfzt8\" (UID: \"97092ddc-d56d-49b0-b290-30f113972b43\") " pod="openshift-service-ca/service-ca-9c57cc56f-nfzt8" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649405 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/f5e8635f-8114-4ba8-b74c-6dd667ef41df-default-certificate\") pod \"router-default-5444994796-cqh2x\" (UID: \"f5e8635f-8114-4ba8-b74c-6dd667ef41df\") " pod="openshift-ingress/router-default-5444994796-cqh2x" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649422 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f245ed23-8fbf-49f9-a284-6580d1025ceb-config\") pod \"kube-apiserver-operator-766d6c64bb-9ssxs\" (UID: \"f245ed23-8fbf-49f9-a284-6580d1025ceb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9ssxs" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649453 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrqp6\" (UniqueName: \"kubernetes.io/projected/ec1b1633-82ef-481f-baac-8bb589265b21-kube-api-access-rrqp6\") pod \"cluster-samples-operator-665b6dd947-s7xdh\" (UID: \"ec1b1633-82ef-481f-baac-8bb589265b21\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-s7xdh" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649467 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3e879c3f-8c95-449b-b9e7-439c78f48209-console-config\") pod \"console-f9d7485db-2qsm5\" (UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649501 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3e879c3f-8c95-449b-b9e7-439c78f48209-console-oauth-config\") pod \"console-f9d7485db-2qsm5\" (UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649518 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2l5n9\" (UniqueName: \"kubernetes.io/projected/4fa1c8c4-4ea6-484c-906a-6e7c8016757b-kube-api-access-2l5n9\") pod \"collect-profiles-29500485-q5vqj\" (UID: \"4fa1c8c4-4ea6-484c-906a-6e7c8016757b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500485-q5vqj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649537 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64f909e9-b00b-427e-9fe8-3ebf290d7676-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-dhhtj\" (UID: \"64f909e9-b00b-427e-9fe8-3ebf290d7676\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dhhtj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649564 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b5aa31dc-369f-427e-97a4-b6245df5df4f-serving-cert\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649598 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b5aa31dc-369f-427e-97a4-b6245df5df4f-config\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649631 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ts5pt\" (UniqueName: \"kubernetes.io/projected/99d83385-4586-40c5-af02-d293febdffc2-kube-api-access-ts5pt\") pod \"machine-config-operator-74547568cd-fcmcn\" (UID: \"99d83385-4586-40c5-af02-d293febdffc2\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fcmcn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649659 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/88cf9601-400d-4dd7-91dd-c2ba110f1731-serving-cert\") pod \"service-ca-operator-777779d784-fzjz7\" (UID: \"88cf9601-400d-4dd7-91dd-c2ba110f1731\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fzjz7" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649678 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d5134e9-c9b5-4959-8092-581e357e5ebf-config\") pod \"etcd-operator-b45778765-c2fjv\" (UID: \"5d5134e9-c9b5-4959-8092-581e357e5ebf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c2fjv" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649696 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4fa1c8c4-4ea6-484c-906a-6e7c8016757b-secret-volume\") pod \"collect-profiles-29500485-q5vqj\" (UID: \"4fa1c8c4-4ea6-484c-906a-6e7c8016757b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500485-q5vqj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649712 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/96f87e1d-9edf-44a3-b884-789dd6a3f334-registration-dir\") pod \"csi-hostpathplugin-zzq5h\" (UID: \"96f87e1d-9edf-44a3-b884-789dd6a3f334\") " pod="hostpath-provisioner/csi-hostpathplugin-zzq5h" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649757 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvw5l\" (UniqueName: \"kubernetes.io/projected/b5aa31dc-369f-427e-97a4-b6245df5df4f-kube-api-access-qvw5l\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649773 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7c1d42f1-afc2-499f-9f51-aae97e6cc10a-srv-cert\") pod \"olm-operator-6b444d44fb-vkdls\" (UID: \"7c1d42f1-afc2-499f-9f51-aae97e6cc10a\") " 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vkdls" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649822 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtvqw\" (UniqueName: \"kubernetes.io/projected/ed8b29bf-e322-4cc9-b027-0aea680ce349-kube-api-access-jtvqw\") pod \"downloads-7954f5f757-mzp9s\" (UID: \"ed8b29bf-e322-4cc9-b027-0aea680ce349\") " pod="openshift-console/downloads-7954f5f757-mzp9s" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649851 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5d5134e9-c9b5-4959-8092-581e357e5ebf-etcd-client\") pod \"etcd-operator-b45778765-c2fjv\" (UID: \"5d5134e9-c9b5-4959-8092-581e357e5ebf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c2fjv" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649867 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9z49x\" (UniqueName: \"kubernetes.io/projected/5d5134e9-c9b5-4959-8092-581e357e5ebf-kube-api-access-9z49x\") pod \"etcd-operator-b45778765-c2fjv\" (UID: \"5d5134e9-c9b5-4959-8092-581e357e5ebf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c2fjv" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649886 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/f5e8635f-8114-4ba8-b74c-6dd667ef41df-stats-auth\") pod \"router-default-5444994796-cqh2x\" (UID: \"f5e8635f-8114-4ba8-b74c-6dd667ef41df\") " pod="openshift-ingress/router-default-5444994796-cqh2x" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649903 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f27a4923-82be-471e-8b65-a3c325f389bc-srv-cert\") pod \"catalog-operator-68c6474976-bdxgd\" (UID: \"f27a4923-82be-471e-8b65-a3c325f389bc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdxgd" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649922 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v86mv\" (UniqueName: \"kubernetes.io/projected/f5e8635f-8114-4ba8-b74c-6dd667ef41df-kube-api-access-v86mv\") pod \"router-default-5444994796-cqh2x\" (UID: \"f5e8635f-8114-4ba8-b74c-6dd667ef41df\") " pod="openshift-ingress/router-default-5444994796-cqh2x" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649939 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c08401-a807-46f6-9aca-26e535cabae5-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-7bcl9\" (UID: \"49c08401-a807-46f6-9aca-26e535cabae5\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7bcl9" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.649957 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f5e8635f-8114-4ba8-b74c-6dd667ef41df-service-ca-bundle\") pod \"router-default-5444994796-cqh2x\" (UID: \"f5e8635f-8114-4ba8-b74c-6dd667ef41df\") " pod="openshift-ingress/router-default-5444994796-cqh2x" Feb 02 10:55:48 crc 
kubenswrapper[4838]: I0202 10:55:48.649984 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b5aa31dc-369f-427e-97a4-b6245df5df4f-image-import-ca\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650001 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7dc10e03-1a96-49b1-b3d4-2126af30e87b-config-volume\") pod \"dns-default-sz4zx\" (UID: \"7dc10e03-1a96-49b1-b3d4-2126af30e87b\") " pod="openshift-dns/dns-default-sz4zx" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650021 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pj2f\" (UniqueName: \"kubernetes.io/projected/65691c3c-b0b5-4460-a1d5-f64043ce7122-kube-api-access-7pj2f\") pod \"migrator-59844c95c7-5fhpx\" (UID: \"65691c3c-b0b5-4460-a1d5-f64043ce7122\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5fhpx" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650045 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3e879c3f-8c95-449b-b9e7-439c78f48209-console-serving-cert\") pod \"console-f9d7485db-2qsm5\" (UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650067 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3e879c3f-8c95-449b-b9e7-439c78f48209-service-ca\") pod \"console-f9d7485db-2qsm5\" (UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650102 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/99d83385-4586-40c5-af02-d293febdffc2-images\") pod \"machine-config-operator-74547568cd-fcmcn\" (UID: \"99d83385-4586-40c5-af02-d293febdffc2\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fcmcn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650120 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6385779-f490-4523-8a93-5c043fb29f56-config\") pod \"kube-controller-manager-operator-78b949d7b-gxndj\" (UID: \"b6385779-f490-4523-8a93-5c043fb29f56\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gxndj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650153 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b5aa31dc-369f-427e-97a4-b6245df5df4f-audit\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650214 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3e879c3f-8c95-449b-b9e7-439c78f48209-trusted-ca-bundle\") pod \"console-f9d7485db-2qsm5\" 
(UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650266 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n78p9\" (UniqueName: \"kubernetes.io/projected/3bffe14a-0216-4854-b0fc-7c482a297b82-kube-api-access-n78p9\") pod \"marketplace-operator-79b997595-s96vn\" (UID: \"3bffe14a-0216-4854-b0fc-7c482a297b82\") " pod="openshift-marketplace/marketplace-operator-79b997595-s96vn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650282 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9w4b\" (UniqueName: \"kubernetes.io/projected/7c1d42f1-afc2-499f-9f51-aae97e6cc10a-kube-api-access-m9w4b\") pod \"olm-operator-6b444d44fb-vkdls\" (UID: \"7c1d42f1-afc2-499f-9f51-aae97e6cc10a\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vkdls" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650305 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650320 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/96f87e1d-9edf-44a3-b884-789dd6a3f334-plugins-dir\") pod \"csi-hostpathplugin-zzq5h\" (UID: \"96f87e1d-9edf-44a3-b884-789dd6a3f334\") " pod="hostpath-provisioner/csi-hostpathplugin-zzq5h" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650358 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/cdd0449c-7006-492f-90ff-5c7962dbe6f9-metrics-tls\") pod \"ingress-operator-5b745b69d9-s84zq\" (UID: \"cdd0449c-7006-492f-90ff-5c7962dbe6f9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-s84zq" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650392 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b5aa31dc-369f-427e-97a4-b6245df5df4f-node-pullsecrets\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650410 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/76adb7c0-2e4a-46a7-ac36-9d8eba267cab-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-27vx7\" (UID: \"76adb7c0-2e4a-46a7-ac36-9d8eba267cab\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-27vx7" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650428 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b6385779-f490-4523-8a93-5c043fb29f56-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-gxndj\" (UID: \"b6385779-f490-4523-8a93-5c043fb29f56\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gxndj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650446 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d24e9722-41b4-4e18-9e40-204ac7e1a67e-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h2pj9\" (UID: \"d24e9722-41b4-4e18-9e40-204ac7e1a67e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h2pj9" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650565 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/943383ae-0bb8-44ef-91c8-25064ea7907f-apiservice-cert\") pod \"packageserver-d55dfcdfc-zdtxg\" (UID: \"943383ae-0bb8-44ef-91c8-25064ea7907f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zdtxg" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650581 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/96f87e1d-9edf-44a3-b884-789dd6a3f334-mountpoint-dir\") pod \"csi-hostpathplugin-zzq5h\" (UID: \"96f87e1d-9edf-44a3-b884-789dd6a3f334\") " pod="hostpath-provisioner/csi-hostpathplugin-zzq5h" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650633 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b5aa31dc-369f-427e-97a4-b6245df5df4f-etcd-serving-ca\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650656 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b5aa31dc-369f-427e-97a4-b6245df5df4f-encryption-config\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650672 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/943383ae-0bb8-44ef-91c8-25064ea7907f-tmpfs\") pod \"packageserver-d55dfcdfc-zdtxg\" (UID: \"943383ae-0bb8-44ef-91c8-25064ea7907f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zdtxg" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650685 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f5e8635f-8114-4ba8-b74c-6dd667ef41df-metrics-certs\") pod \"router-default-5444994796-cqh2x\" (UID: \"f5e8635f-8114-4ba8-b74c-6dd667ef41df\") " pod="openshift-ingress/router-default-5444994796-cqh2x" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650711 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0adb948a-923d-44f9-8cad-f36fe04a90b2-bound-sa-token\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650716 4838 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b5aa31dc-369f-427e-97a4-b6245df5df4f-config\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650731 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvzpq\" (UniqueName: \"kubernetes.io/projected/49c08401-a807-46f6-9aca-26e535cabae5-kube-api-access-xvzpq\") pod \"package-server-manager-789f6589d5-7bcl9\" (UID: \"49c08401-a807-46f6-9aca-26e535cabae5\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7bcl9" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650783 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/97092ddc-d56d-49b0-b290-30f113972b43-signing-cabundle\") pod \"service-ca-9c57cc56f-nfzt8\" (UID: \"97092ddc-d56d-49b0-b290-30f113972b43\") " pod="openshift-service-ca/service-ca-9c57cc56f-nfzt8" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650926 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/5d5134e9-c9b5-4959-8092-581e357e5ebf-etcd-ca\") pod \"etcd-operator-b45778765-c2fjv\" (UID: \"5d5134e9-c9b5-4959-8092-581e357e5ebf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c2fjv" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.650985 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/93c46aec-8376-48e6-9b7c-fd0ef58bfd91-node-bootstrap-token\") pod \"machine-config-server-s7pcn\" (UID: \"93c46aec-8376-48e6-9b7c-fd0ef58bfd91\") " pod="openshift-machine-config-operator/machine-config-server-s7pcn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.651000 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b6385779-f490-4523-8a93-5c043fb29f56-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-gxndj\" (UID: \"b6385779-f490-4523-8a93-5c043fb29f56\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gxndj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.651015 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bs69m\" (UniqueName: \"kubernetes.io/projected/96f87e1d-9edf-44a3-b884-789dd6a3f334-kube-api-access-bs69m\") pod \"csi-hostpathplugin-zzq5h\" (UID: \"96f87e1d-9edf-44a3-b884-789dd6a3f334\") " pod="hostpath-provisioner/csi-hostpathplugin-zzq5h" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.651029 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2smwm\" (UniqueName: \"kubernetes.io/projected/943383ae-0bb8-44ef-91c8-25064ea7907f-kube-api-access-2smwm\") pod \"packageserver-d55dfcdfc-zdtxg\" (UID: \"943383ae-0bb8-44ef-91c8-25064ea7907f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zdtxg" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.651046 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: 
\"kubernetes.io/host-path/96f87e1d-9edf-44a3-b884-789dd6a3f334-socket-dir\") pod \"csi-hostpathplugin-zzq5h\" (UID: \"96f87e1d-9edf-44a3-b884-789dd6a3f334\") " pod="hostpath-provisioner/csi-hostpathplugin-zzq5h" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.651072 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/76adb7c0-2e4a-46a7-ac36-9d8eba267cab-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-27vx7\" (UID: \"76adb7c0-2e4a-46a7-ac36-9d8eba267cab\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-27vx7" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.651088 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmstl\" (UniqueName: \"kubernetes.io/projected/88cf9601-400d-4dd7-91dd-c2ba110f1731-kube-api-access-jmstl\") pod \"service-ca-operator-777779d784-fzjz7\" (UID: \"88cf9601-400d-4dd7-91dd-c2ba110f1731\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fzjz7" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.651105 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0adb948a-923d-44f9-8cad-f36fe04a90b2-registry-tls\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.651129 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/ec1b1633-82ef-481f-baac-8bb589265b21-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-s7xdh\" (UID: \"ec1b1633-82ef-481f-baac-8bb589265b21\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-s7xdh" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.651146 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3bffe14a-0216-4854-b0fc-7c482a297b82-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-s96vn\" (UID: \"3bffe14a-0216-4854-b0fc-7c482a297b82\") " pod="openshift-marketplace/marketplace-operator-79b997595-s96vn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.651164 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3bffe14a-0216-4854-b0fc-7c482a297b82-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-s96vn\" (UID: \"3bffe14a-0216-4854-b0fc-7c482a297b82\") " pod="openshift-marketplace/marketplace-operator-79b997595-s96vn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.651178 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cd169950-cc5a-4c99-ae5f-9258e0164f7a-cert\") pod \"ingress-canary-5pw6k\" (UID: \"cd169950-cc5a-4c99-ae5f-9258e0164f7a\") " pod="openshift-ingress-canary/ingress-canary-5pw6k" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.651193 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/5d5134e9-c9b5-4959-8092-581e357e5ebf-serving-cert\") pod \"etcd-operator-b45778765-c2fjv\" (UID: \"5d5134e9-c9b5-4959-8092-581e357e5ebf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c2fjv" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.651209 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64f909e9-b00b-427e-9fe8-3ebf290d7676-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-dhhtj\" (UID: \"64f909e9-b00b-427e-9fe8-3ebf290d7676\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dhhtj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.651234 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwmmk\" (UniqueName: \"kubernetes.io/projected/97092ddc-d56d-49b0-b290-30f113972b43-kube-api-access-hwmmk\") pod \"service-ca-9c57cc56f-nfzt8\" (UID: \"97092ddc-d56d-49b0-b290-30f113972b43\") " pod="openshift-service-ca/service-ca-9c57cc56f-nfzt8" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.651417 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b5aa31dc-369f-427e-97a4-b6245df5df4f-audit\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: E0202 10:55:48.652055 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:49.152037902 +0000 UTC m=+143.489138930 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.652103 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b5aa31dc-369f-427e-97a4-b6245df5df4f-node-pullsecrets\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.653407 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b5aa31dc-369f-427e-97a4-b6245df5df4f-etcd-serving-ca\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.654416 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d24e9722-41b4-4e18-9e40-204ac7e1a67e-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h2pj9\" (UID: \"d24e9722-41b4-4e18-9e40-204ac7e1a67e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h2pj9" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.655190 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3e879c3f-8c95-449b-b9e7-439c78f48209-console-config\") pod \"console-f9d7485db-2qsm5\" (UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.655297 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b5aa31dc-369f-427e-97a4-b6245df5df4f-audit-dir\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.655360 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88cf9601-400d-4dd7-91dd-c2ba110f1731-config\") pod \"service-ca-operator-777779d784-fzjz7\" (UID: \"88cf9601-400d-4dd7-91dd-c2ba110f1731\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fzjz7" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.655390 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b5aa31dc-369f-427e-97a4-b6245df5df4f-audit-dir\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.655427 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d24e9722-41b4-4e18-9e40-204ac7e1a67e-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h2pj9\" (UID: 
\"d24e9722-41b4-4e18-9e40-204ac7e1a67e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h2pj9" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.655471 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b5aa31dc-369f-427e-97a4-b6245df5df4f-image-import-ca\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.655700 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b5aa31dc-369f-427e-97a4-b6245df5df4f-etcd-client\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.655721 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3e879c3f-8c95-449b-b9e7-439c78f48209-oauth-serving-cert\") pod \"console-f9d7485db-2qsm5\" (UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.655794 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9d7w\" (UniqueName: \"kubernetes.io/projected/3e879c3f-8c95-449b-b9e7-439c78f48209-kube-api-access-v9d7w\") pod \"console-f9d7485db-2qsm5\" (UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.656113 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzzk9\" (UniqueName: \"kubernetes.io/projected/0adb948a-923d-44f9-8cad-f36fe04a90b2-kube-api-access-hzzk9\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.656183 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/943383ae-0bb8-44ef-91c8-25064ea7907f-webhook-cert\") pod \"packageserver-d55dfcdfc-zdtxg\" (UID: \"943383ae-0bb8-44ef-91c8-25064ea7907f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zdtxg" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.656217 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/9cc33e8c-fb5d-4006-be95-170ec3d739e9-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-p64qh\" (UID: \"9cc33e8c-fb5d-4006-be95-170ec3d739e9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p64qh" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.656281 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/76adb7c0-2e4a-46a7-ac36-9d8eba267cab-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-27vx7\" (UID: \"76adb7c0-2e4a-46a7-ac36-9d8eba267cab\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-27vx7" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.656307 4838 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pz8ft\" (UniqueName: \"kubernetes.io/projected/7dc10e03-1a96-49b1-b3d4-2126af30e87b-kube-api-access-pz8ft\") pod \"dns-default-sz4zx\" (UID: \"7dc10e03-1a96-49b1-b3d4-2126af30e87b\") " pod="openshift-dns/dns-default-sz4zx" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.656392 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3e879c3f-8c95-449b-b9e7-439c78f48209-oauth-serving-cert\") pod \"console-f9d7485db-2qsm5\" (UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.656435 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cdd0449c-7006-492f-90ff-5c7962dbe6f9-trusted-ca\") pod \"ingress-operator-5b745b69d9-s84zq\" (UID: \"cdd0449c-7006-492f-90ff-5c7962dbe6f9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-s84zq" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.656779 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztrjl\" (UniqueName: \"kubernetes.io/projected/76adb7c0-2e4a-46a7-ac36-9d8eba267cab-kube-api-access-ztrjl\") pod \"cluster-image-registry-operator-dc59b4c8b-27vx7\" (UID: \"76adb7c0-2e4a-46a7-ac36-9d8eba267cab\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-27vx7" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.656816 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/93c46aec-8376-48e6-9b7c-fd0ef58bfd91-certs\") pod \"machine-config-server-s7pcn\" (UID: \"93c46aec-8376-48e6-9b7c-fd0ef58bfd91\") " pod="openshift-machine-config-operator/machine-config-server-s7pcn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.656860 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/99d83385-4586-40c5-af02-d293febdffc2-auth-proxy-config\") pod \"machine-config-operator-74547568cd-fcmcn\" (UID: \"99d83385-4586-40c5-af02-d293febdffc2\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fcmcn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.656890 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0adb948a-923d-44f9-8cad-f36fe04a90b2-trusted-ca\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.656921 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4fa1c8c4-4ea6-484c-906a-6e7c8016757b-config-volume\") pod \"collect-profiles-29500485-q5vqj\" (UID: \"4fa1c8c4-4ea6-484c-906a-6e7c8016757b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500485-q5vqj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.656932 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3e879c3f-8c95-449b-b9e7-439c78f48209-service-ca\") pod 
\"console-f9d7485db-2qsm5\" (UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.656940 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cdd0449c-7006-492f-90ff-5c7962dbe6f9-bound-sa-token\") pod \"ingress-operator-5b745b69d9-s84zq\" (UID: \"cdd0449c-7006-492f-90ff-5c7962dbe6f9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-s84zq" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.659353 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0adb948a-923d-44f9-8cad-f36fe04a90b2-trusted-ca\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.659410 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f245ed23-8fbf-49f9-a284-6580d1025ceb-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-9ssxs\" (UID: \"f245ed23-8fbf-49f9-a284-6580d1025ceb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9ssxs" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.659441 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0adb948a-923d-44f9-8cad-f36fe04a90b2-registry-certificates\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.659501 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxfq8\" (UniqueName: \"kubernetes.io/projected/f27a4923-82be-471e-8b65-a3c325f389bc-kube-api-access-cxfq8\") pod \"catalog-operator-68c6474976-bdxgd\" (UID: \"f27a4923-82be-471e-8b65-a3c325f389bc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdxgd" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.659681 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9l8bc\" (UniqueName: \"kubernetes.io/projected/cdd0449c-7006-492f-90ff-5c7962dbe6f9-kube-api-access-9l8bc\") pod \"ingress-operator-5b745b69d9-s84zq\" (UID: \"cdd0449c-7006-492f-90ff-5c7962dbe6f9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-s84zq" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.659717 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6d8v\" (UniqueName: \"kubernetes.io/projected/cd169950-cc5a-4c99-ae5f-9258e0164f7a-kube-api-access-c6d8v\") pod \"ingress-canary-5pw6k\" (UID: \"cd169950-cc5a-4c99-ae5f-9258e0164f7a\") " pod="openshift-ingress-canary/ingress-canary-5pw6k" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.659739 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f245ed23-8fbf-49f9-a284-6580d1025ceb-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-9ssxs\" (UID: \"f245ed23-8fbf-49f9-a284-6580d1025ceb\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9ssxs" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.659783 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0adb948a-923d-44f9-8cad-f36fe04a90b2-installation-pull-secrets\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.659809 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/99d83385-4586-40c5-af02-d293febdffc2-proxy-tls\") pod \"machine-config-operator-74547568cd-fcmcn\" (UID: \"99d83385-4586-40c5-af02-d293febdffc2\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fcmcn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.659918 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7dc10e03-1a96-49b1-b3d4-2126af30e87b-metrics-tls\") pod \"dns-default-sz4zx\" (UID: \"7dc10e03-1a96-49b1-b3d4-2126af30e87b\") " pod="openshift-dns/dns-default-sz4zx" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.659959 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b5aa31dc-369f-427e-97a4-b6245df5df4f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.659983 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g25l9\" (UniqueName: \"kubernetes.io/projected/93c46aec-8376-48e6-9b7c-fd0ef58bfd91-kube-api-access-g25l9\") pod \"machine-config-server-s7pcn\" (UID: \"93c46aec-8376-48e6-9b7c-fd0ef58bfd91\") " pod="openshift-machine-config-operator/machine-config-server-s7pcn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.660004 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbxvc\" (UniqueName: \"kubernetes.io/projected/9cc33e8c-fb5d-4006-be95-170ec3d739e9-kube-api-access-bbxvc\") pod \"multus-admission-controller-857f4d67dd-p64qh\" (UID: \"9cc33e8c-fb5d-4006-be95-170ec3d739e9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p64qh" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.660034 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0adb948a-923d-44f9-8cad-f36fe04a90b2-ca-trust-extracted\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.660055 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/5d5134e9-c9b5-4959-8092-581e357e5ebf-etcd-service-ca\") pod \"etcd-operator-b45778765-c2fjv\" (UID: \"5d5134e9-c9b5-4959-8092-581e357e5ebf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c2fjv" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.660106 4838 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f27a4923-82be-471e-8b65-a3c325f389bc-profile-collector-cert\") pod \"catalog-operator-68c6474976-bdxgd\" (UID: \"f27a4923-82be-471e-8b65-a3c325f389bc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdxgd" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.660130 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/96f87e1d-9edf-44a3-b884-789dd6a3f334-csi-data-dir\") pod \"csi-hostpathplugin-zzq5h\" (UID: \"96f87e1d-9edf-44a3-b884-789dd6a3f334\") " pod="hostpath-provisioner/csi-hostpathplugin-zzq5h" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.661689 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0adb948a-923d-44f9-8cad-f36fe04a90b2-registry-certificates\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.663809 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/76adb7c0-2e4a-46a7-ac36-9d8eba267cab-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-27vx7\" (UID: \"76adb7c0-2e4a-46a7-ac36-9d8eba267cab\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-27vx7" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.665470 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0adb948a-923d-44f9-8cad-f36fe04a90b2-installation-pull-secrets\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.667740 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b5aa31dc-369f-427e-97a4-b6245df5df4f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.668471 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b5aa31dc-369f-427e-97a4-b6245df5df4f-serving-cert\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.668880 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3e879c3f-8c95-449b-b9e7-439c78f48209-console-oauth-config\") pod \"console-f9d7485db-2qsm5\" (UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.669272 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0adb948a-923d-44f9-8cad-f36fe04a90b2-registry-tls\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") 
" pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.669625 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/76adb7c0-2e4a-46a7-ac36-9d8eba267cab-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-27vx7\" (UID: \"76adb7c0-2e4a-46a7-ac36-9d8eba267cab\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-27vx7" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.669966 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d24e9722-41b4-4e18-9e40-204ac7e1a67e-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h2pj9\" (UID: \"d24e9722-41b4-4e18-9e40-204ac7e1a67e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h2pj9" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.670272 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0adb948a-923d-44f9-8cad-f36fe04a90b2-ca-trust-extracted\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.670876 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b5aa31dc-369f-427e-97a4-b6245df5df4f-etcd-client\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.672129 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b5aa31dc-369f-427e-97a4-b6245df5df4f-encryption-config\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.673426 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3e879c3f-8c95-449b-b9e7-439c78f48209-console-serving-cert\") pod \"console-f9d7485db-2qsm5\" (UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.673856 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/ec1b1633-82ef-481f-baac-8bb589265b21-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-s7xdh\" (UID: \"ec1b1633-82ef-481f-baac-8bb589265b21\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-s7xdh" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.685200 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj"
Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.693070 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d24e9722-41b4-4e18-9e40-204ac7e1a67e-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h2pj9\" (UID: \"d24e9722-41b4-4e18-9e40-204ac7e1a67e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h2pj9"
Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.704168 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtvqw\" (UniqueName: \"kubernetes.io/projected/ed8b29bf-e322-4cc9-b027-0aea680ce349-kube-api-access-jtvqw\") pod \"downloads-7954f5f757-mzp9s\" (UID: \"ed8b29bf-e322-4cc9-b027-0aea680ce349\") " pod="openshift-console/downloads-7954f5f757-mzp9s"
Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.704359 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5nmhg"]
Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.725984 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrqp6\" (UniqueName: \"kubernetes.io/projected/ec1b1633-82ef-481f-baac-8bb589265b21-kube-api-access-rrqp6\") pod \"cluster-samples-operator-665b6dd947-s7xdh\" (UID: \"ec1b1633-82ef-481f-baac-8bb589265b21\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-s7xdh"
Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.744503 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvw5l\" (UniqueName: \"kubernetes.io/projected/b5aa31dc-369f-427e-97a4-b6245df5df4f-kube-api-access-qvw5l\") pod \"apiserver-76f77b778f-zfr2j\" (UID: \"b5aa31dc-369f-427e-97a4-b6245df5df4f\") " pod="openshift-apiserver/apiserver-76f77b778f-zfr2j"
Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.744752 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-j52s8"
Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.747574 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-dtdwq"
Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.754336 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj"]
Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.758961 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-w48jn"]
Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.760854 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 10:55:48 crc kubenswrapper[4838]: E0202 10:55:48.760881 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:49.260841361 +0000 UTC m=+143.597942389 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761133 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b6385779-f490-4523-8a93-5c043fb29f56-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-gxndj\" (UID: \"b6385779-f490-4523-8a93-5c043fb29f56\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gxndj"
Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761174 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bs69m\" (UniqueName: \"kubernetes.io/projected/96f87e1d-9edf-44a3-b884-789dd6a3f334-kube-api-access-bs69m\") pod \"csi-hostpathplugin-zzq5h\" (UID: \"96f87e1d-9edf-44a3-b884-789dd6a3f334\") " pod="hostpath-provisioner/csi-hostpathplugin-zzq5h"
Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761191 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2smwm\" (UniqueName: \"kubernetes.io/projected/943383ae-0bb8-44ef-91c8-25064ea7907f-kube-api-access-2smwm\") pod \"packageserver-d55dfcdfc-zdtxg\" (UID: \"943383ae-0bb8-44ef-91c8-25064ea7907f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zdtxg"
Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761206 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/93c46aec-8376-48e6-9b7c-fd0ef58bfd91-node-bootstrap-token\") pod \"machine-config-server-s7pcn\" (UID: \"93c46aec-8376-48e6-9b7c-fd0ef58bfd91\") " pod="openshift-machine-config-operator/machine-config-server-s7pcn"
Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761252 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/96f87e1d-9edf-44a3-b884-789dd6a3f334-socket-dir\") pod \"csi-hostpathplugin-zzq5h\" (UID: \"96f87e1d-9edf-44a3-b884-789dd6a3f334\") " pod="hostpath-provisioner/csi-hostpathplugin-zzq5h"
Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761270 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmstl\" (UniqueName: \"kubernetes.io/projected/88cf9601-400d-4dd7-91dd-c2ba110f1731-kube-api-access-jmstl\") pod \"service-ca-operator-777779d784-fzjz7\" (UID: \"88cf9601-400d-4dd7-91dd-c2ba110f1731\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fzjz7"
Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761290 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3bffe14a-0216-4854-b0fc-7c482a297b82-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-s96vn\" (UID: \"3bffe14a-0216-4854-b0fc-7c482a297b82\") " pod="openshift-marketplace/marketplace-operator-79b997595-s96vn"
Feb 02 10:55:48 crc
kubenswrapper[4838]: I0202 10:55:48.761330 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cd169950-cc5a-4c99-ae5f-9258e0164f7a-cert\") pod \"ingress-canary-5pw6k\" (UID: \"cd169950-cc5a-4c99-ae5f-9258e0164f7a\") " pod="openshift-ingress-canary/ingress-canary-5pw6k" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761346 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d5134e9-c9b5-4959-8092-581e357e5ebf-serving-cert\") pod \"etcd-operator-b45778765-c2fjv\" (UID: \"5d5134e9-c9b5-4959-8092-581e357e5ebf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c2fjv" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761360 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3bffe14a-0216-4854-b0fc-7c482a297b82-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-s96vn\" (UID: \"3bffe14a-0216-4854-b0fc-7c482a297b82\") " pod="openshift-marketplace/marketplace-operator-79b997595-s96vn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761377 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64f909e9-b00b-427e-9fe8-3ebf290d7676-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-dhhtj\" (UID: \"64f909e9-b00b-427e-9fe8-3ebf290d7676\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dhhtj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761417 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwmmk\" (UniqueName: \"kubernetes.io/projected/97092ddc-d56d-49b0-b290-30f113972b43-kube-api-access-hwmmk\") pod \"service-ca-9c57cc56f-nfzt8\" (UID: \"97092ddc-d56d-49b0-b290-30f113972b43\") " pod="openshift-service-ca/service-ca-9c57cc56f-nfzt8" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761443 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88cf9601-400d-4dd7-91dd-c2ba110f1731-config\") pod \"service-ca-operator-777779d784-fzjz7\" (UID: \"88cf9601-400d-4dd7-91dd-c2ba110f1731\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fzjz7" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761499 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/943383ae-0bb8-44ef-91c8-25064ea7907f-webhook-cert\") pod \"packageserver-d55dfcdfc-zdtxg\" (UID: \"943383ae-0bb8-44ef-91c8-25064ea7907f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zdtxg" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761515 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/9cc33e8c-fb5d-4006-be95-170ec3d739e9-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-p64qh\" (UID: \"9cc33e8c-fb5d-4006-be95-170ec3d739e9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p64qh" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761532 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pz8ft\" (UniqueName: 
\"kubernetes.io/projected/7dc10e03-1a96-49b1-b3d4-2126af30e87b-kube-api-access-pz8ft\") pod \"dns-default-sz4zx\" (UID: \"7dc10e03-1a96-49b1-b3d4-2126af30e87b\") " pod="openshift-dns/dns-default-sz4zx" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761571 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cdd0449c-7006-492f-90ff-5c7962dbe6f9-trusted-ca\") pod \"ingress-operator-5b745b69d9-s84zq\" (UID: \"cdd0449c-7006-492f-90ff-5c7962dbe6f9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-s84zq" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761598 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/93c46aec-8376-48e6-9b7c-fd0ef58bfd91-certs\") pod \"machine-config-server-s7pcn\" (UID: \"93c46aec-8376-48e6-9b7c-fd0ef58bfd91\") " pod="openshift-machine-config-operator/machine-config-server-s7pcn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761637 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/99d83385-4586-40c5-af02-d293febdffc2-auth-proxy-config\") pod \"machine-config-operator-74547568cd-fcmcn\" (UID: \"99d83385-4586-40c5-af02-d293febdffc2\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fcmcn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761658 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4fa1c8c4-4ea6-484c-906a-6e7c8016757b-config-volume\") pod \"collect-profiles-29500485-q5vqj\" (UID: \"4fa1c8c4-4ea6-484c-906a-6e7c8016757b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500485-q5vqj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761677 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f245ed23-8fbf-49f9-a284-6580d1025ceb-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-9ssxs\" (UID: \"f245ed23-8fbf-49f9-a284-6580d1025ceb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9ssxs" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761760 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cdd0449c-7006-492f-90ff-5c7962dbe6f9-bound-sa-token\") pod \"ingress-operator-5b745b69d9-s84zq\" (UID: \"cdd0449c-7006-492f-90ff-5c7962dbe6f9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-s84zq" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761780 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxfq8\" (UniqueName: \"kubernetes.io/projected/f27a4923-82be-471e-8b65-a3c325f389bc-kube-api-access-cxfq8\") pod \"catalog-operator-68c6474976-bdxgd\" (UID: \"f27a4923-82be-471e-8b65-a3c325f389bc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdxgd" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761819 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9l8bc\" (UniqueName: \"kubernetes.io/projected/cdd0449c-7006-492f-90ff-5c7962dbe6f9-kube-api-access-9l8bc\") pod \"ingress-operator-5b745b69d9-s84zq\" (UID: \"cdd0449c-7006-492f-90ff-5c7962dbe6f9\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-s84zq" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761835 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6d8v\" (UniqueName: \"kubernetes.io/projected/cd169950-cc5a-4c99-ae5f-9258e0164f7a-kube-api-access-c6d8v\") pod \"ingress-canary-5pw6k\" (UID: \"cd169950-cc5a-4c99-ae5f-9258e0164f7a\") " pod="openshift-ingress-canary/ingress-canary-5pw6k" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761850 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f245ed23-8fbf-49f9-a284-6580d1025ceb-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-9ssxs\" (UID: \"f245ed23-8fbf-49f9-a284-6580d1025ceb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9ssxs" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761898 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/99d83385-4586-40c5-af02-d293febdffc2-proxy-tls\") pod \"machine-config-operator-74547568cd-fcmcn\" (UID: \"99d83385-4586-40c5-af02-d293febdffc2\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fcmcn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761914 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7dc10e03-1a96-49b1-b3d4-2126af30e87b-metrics-tls\") pod \"dns-default-sz4zx\" (UID: \"7dc10e03-1a96-49b1-b3d4-2126af30e87b\") " pod="openshift-dns/dns-default-sz4zx" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761932 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g25l9\" (UniqueName: \"kubernetes.io/projected/93c46aec-8376-48e6-9b7c-fd0ef58bfd91-kube-api-access-g25l9\") pod \"machine-config-server-s7pcn\" (UID: \"93c46aec-8376-48e6-9b7c-fd0ef58bfd91\") " pod="openshift-machine-config-operator/machine-config-server-s7pcn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761966 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbxvc\" (UniqueName: \"kubernetes.io/projected/9cc33e8c-fb5d-4006-be95-170ec3d739e9-kube-api-access-bbxvc\") pod \"multus-admission-controller-857f4d67dd-p64qh\" (UID: \"9cc33e8c-fb5d-4006-be95-170ec3d739e9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p64qh" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.761986 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/5d5134e9-c9b5-4959-8092-581e357e5ebf-etcd-service-ca\") pod \"etcd-operator-b45778765-c2fjv\" (UID: \"5d5134e9-c9b5-4959-8092-581e357e5ebf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c2fjv" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762002 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f27a4923-82be-471e-8b65-a3c325f389bc-profile-collector-cert\") pod \"catalog-operator-68c6474976-bdxgd\" (UID: \"f27a4923-82be-471e-8b65-a3c325f389bc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdxgd" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762018 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/96f87e1d-9edf-44a3-b884-789dd6a3f334-csi-data-dir\") pod \"csi-hostpathplugin-zzq5h\" (UID: \"96f87e1d-9edf-44a3-b884-789dd6a3f334\") " pod="hostpath-provisioner/csi-hostpathplugin-zzq5h" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762059 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjdft\" (UniqueName: \"kubernetes.io/projected/64f909e9-b00b-427e-9fe8-3ebf290d7676-kube-api-access-sjdft\") pod \"kube-storage-version-migrator-operator-b67b599dd-dhhtj\" (UID: \"64f909e9-b00b-427e-9fe8-3ebf290d7676\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dhhtj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762080 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7c1d42f1-afc2-499f-9f51-aae97e6cc10a-profile-collector-cert\") pod \"olm-operator-6b444d44fb-vkdls\" (UID: \"7c1d42f1-afc2-499f-9f51-aae97e6cc10a\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vkdls" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762123 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/97092ddc-d56d-49b0-b290-30f113972b43-signing-key\") pod \"service-ca-9c57cc56f-nfzt8\" (UID: \"97092ddc-d56d-49b0-b290-30f113972b43\") " pod="openshift-service-ca/service-ca-9c57cc56f-nfzt8" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762141 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/f5e8635f-8114-4ba8-b74c-6dd667ef41df-default-certificate\") pod \"router-default-5444994796-cqh2x\" (UID: \"f5e8635f-8114-4ba8-b74c-6dd667ef41df\") " pod="openshift-ingress/router-default-5444994796-cqh2x" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762155 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f245ed23-8fbf-49f9-a284-6580d1025ceb-config\") pod \"kube-apiserver-operator-766d6c64bb-9ssxs\" (UID: \"f245ed23-8fbf-49f9-a284-6580d1025ceb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9ssxs" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762197 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2l5n9\" (UniqueName: \"kubernetes.io/projected/4fa1c8c4-4ea6-484c-906a-6e7c8016757b-kube-api-access-2l5n9\") pod \"collect-profiles-29500485-q5vqj\" (UID: \"4fa1c8c4-4ea6-484c-906a-6e7c8016757b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500485-q5vqj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762214 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64f909e9-b00b-427e-9fe8-3ebf290d7676-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-dhhtj\" (UID: \"64f909e9-b00b-427e-9fe8-3ebf290d7676\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dhhtj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762241 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ts5pt\" (UniqueName: 
\"kubernetes.io/projected/99d83385-4586-40c5-af02-d293febdffc2-kube-api-access-ts5pt\") pod \"machine-config-operator-74547568cd-fcmcn\" (UID: \"99d83385-4586-40c5-af02-d293febdffc2\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fcmcn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762276 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/88cf9601-400d-4dd7-91dd-c2ba110f1731-serving-cert\") pod \"service-ca-operator-777779d784-fzjz7\" (UID: \"88cf9601-400d-4dd7-91dd-c2ba110f1731\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fzjz7" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762292 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d5134e9-c9b5-4959-8092-581e357e5ebf-config\") pod \"etcd-operator-b45778765-c2fjv\" (UID: \"5d5134e9-c9b5-4959-8092-581e357e5ebf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c2fjv" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762307 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4fa1c8c4-4ea6-484c-906a-6e7c8016757b-secret-volume\") pod \"collect-profiles-29500485-q5vqj\" (UID: \"4fa1c8c4-4ea6-484c-906a-6e7c8016757b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500485-q5vqj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762320 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/96f87e1d-9edf-44a3-b884-789dd6a3f334-registration-dir\") pod \"csi-hostpathplugin-zzq5h\" (UID: \"96f87e1d-9edf-44a3-b884-789dd6a3f334\") " pod="hostpath-provisioner/csi-hostpathplugin-zzq5h" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762356 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7c1d42f1-afc2-499f-9f51-aae97e6cc10a-srv-cert\") pod \"olm-operator-6b444d44fb-vkdls\" (UID: \"7c1d42f1-afc2-499f-9f51-aae97e6cc10a\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vkdls" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762374 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5d5134e9-c9b5-4959-8092-581e357e5ebf-etcd-client\") pod \"etcd-operator-b45778765-c2fjv\" (UID: \"5d5134e9-c9b5-4959-8092-581e357e5ebf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c2fjv" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762390 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9z49x\" (UniqueName: \"kubernetes.io/projected/5d5134e9-c9b5-4959-8092-581e357e5ebf-kube-api-access-9z49x\") pod \"etcd-operator-b45778765-c2fjv\" (UID: \"5d5134e9-c9b5-4959-8092-581e357e5ebf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c2fjv" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762407 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/f5e8635f-8114-4ba8-b74c-6dd667ef41df-stats-auth\") pod \"router-default-5444994796-cqh2x\" (UID: \"f5e8635f-8114-4ba8-b74c-6dd667ef41df\") " pod="openshift-ingress/router-default-5444994796-cqh2x" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 
10:55:48.762441 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f27a4923-82be-471e-8b65-a3c325f389bc-srv-cert\") pod \"catalog-operator-68c6474976-bdxgd\" (UID: \"f27a4923-82be-471e-8b65-a3c325f389bc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdxgd" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762458 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v86mv\" (UniqueName: \"kubernetes.io/projected/f5e8635f-8114-4ba8-b74c-6dd667ef41df-kube-api-access-v86mv\") pod \"router-default-5444994796-cqh2x\" (UID: \"f5e8635f-8114-4ba8-b74c-6dd667ef41df\") " pod="openshift-ingress/router-default-5444994796-cqh2x" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762475 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f5e8635f-8114-4ba8-b74c-6dd667ef41df-service-ca-bundle\") pod \"router-default-5444994796-cqh2x\" (UID: \"f5e8635f-8114-4ba8-b74c-6dd667ef41df\") " pod="openshift-ingress/router-default-5444994796-cqh2x" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762518 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c08401-a807-46f6-9aca-26e535cabae5-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-7bcl9\" (UID: \"49c08401-a807-46f6-9aca-26e535cabae5\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7bcl9" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762541 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7dc10e03-1a96-49b1-b3d4-2126af30e87b-config-volume\") pod \"dns-default-sz4zx\" (UID: \"7dc10e03-1a96-49b1-b3d4-2126af30e87b\") " pod="openshift-dns/dns-default-sz4zx" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762562 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pj2f\" (UniqueName: \"kubernetes.io/projected/65691c3c-b0b5-4460-a1d5-f64043ce7122-kube-api-access-7pj2f\") pod \"migrator-59844c95c7-5fhpx\" (UID: \"65691c3c-b0b5-4460-a1d5-f64043ce7122\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5fhpx" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762599 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/99d83385-4586-40c5-af02-d293febdffc2-images\") pod \"machine-config-operator-74547568cd-fcmcn\" (UID: \"99d83385-4586-40c5-af02-d293febdffc2\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fcmcn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762637 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6385779-f490-4523-8a93-5c043fb29f56-config\") pod \"kube-controller-manager-operator-78b949d7b-gxndj\" (UID: \"b6385779-f490-4523-8a93-5c043fb29f56\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gxndj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762639 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: 
\"kubernetes.io/host-path/96f87e1d-9edf-44a3-b884-789dd6a3f334-socket-dir\") pod \"csi-hostpathplugin-zzq5h\" (UID: \"96f87e1d-9edf-44a3-b884-789dd6a3f334\") " pod="hostpath-provisioner/csi-hostpathplugin-zzq5h" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762663 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n78p9\" (UniqueName: \"kubernetes.io/projected/3bffe14a-0216-4854-b0fc-7c482a297b82-kube-api-access-n78p9\") pod \"marketplace-operator-79b997595-s96vn\" (UID: \"3bffe14a-0216-4854-b0fc-7c482a297b82\") " pod="openshift-marketplace/marketplace-operator-79b997595-s96vn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762714 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9w4b\" (UniqueName: \"kubernetes.io/projected/7c1d42f1-afc2-499f-9f51-aae97e6cc10a-kube-api-access-m9w4b\") pod \"olm-operator-6b444d44fb-vkdls\" (UID: \"7c1d42f1-afc2-499f-9f51-aae97e6cc10a\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vkdls" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762747 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762769 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/96f87e1d-9edf-44a3-b884-789dd6a3f334-plugins-dir\") pod \"csi-hostpathplugin-zzq5h\" (UID: \"96f87e1d-9edf-44a3-b884-789dd6a3f334\") " pod="hostpath-provisioner/csi-hostpathplugin-zzq5h" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762790 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/cdd0449c-7006-492f-90ff-5c7962dbe6f9-metrics-tls\") pod \"ingress-operator-5b745b69d9-s84zq\" (UID: \"cdd0449c-7006-492f-90ff-5c7962dbe6f9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-s84zq" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762818 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b6385779-f490-4523-8a93-5c043fb29f56-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-gxndj\" (UID: \"b6385779-f490-4523-8a93-5c043fb29f56\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gxndj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762852 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/943383ae-0bb8-44ef-91c8-25064ea7907f-apiservice-cert\") pod \"packageserver-d55dfcdfc-zdtxg\" (UID: \"943383ae-0bb8-44ef-91c8-25064ea7907f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zdtxg" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762873 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/96f87e1d-9edf-44a3-b884-789dd6a3f334-mountpoint-dir\") pod \"csi-hostpathplugin-zzq5h\" (UID: \"96f87e1d-9edf-44a3-b884-789dd6a3f334\") " 
pod="hostpath-provisioner/csi-hostpathplugin-zzq5h" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762901 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/943383ae-0bb8-44ef-91c8-25064ea7907f-tmpfs\") pod \"packageserver-d55dfcdfc-zdtxg\" (UID: \"943383ae-0bb8-44ef-91c8-25064ea7907f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zdtxg" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762919 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f5e8635f-8114-4ba8-b74c-6dd667ef41df-metrics-certs\") pod \"router-default-5444994796-cqh2x\" (UID: \"f5e8635f-8114-4ba8-b74c-6dd667ef41df\") " pod="openshift-ingress/router-default-5444994796-cqh2x" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762949 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvzpq\" (UniqueName: \"kubernetes.io/projected/49c08401-a807-46f6-9aca-26e535cabae5-kube-api-access-xvzpq\") pod \"package-server-manager-789f6589d5-7bcl9\" (UID: \"49c08401-a807-46f6-9aca-26e535cabae5\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7bcl9" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762966 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/97092ddc-d56d-49b0-b290-30f113972b43-signing-cabundle\") pod \"service-ca-9c57cc56f-nfzt8\" (UID: \"97092ddc-d56d-49b0-b290-30f113972b43\") " pod="openshift-service-ca/service-ca-9c57cc56f-nfzt8" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.762984 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/5d5134e9-c9b5-4959-8092-581e357e5ebf-etcd-ca\") pod \"etcd-operator-b45778765-c2fjv\" (UID: \"5d5134e9-c9b5-4959-8092-581e357e5ebf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c2fjv" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.763526 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/5d5134e9-c9b5-4959-8092-581e357e5ebf-etcd-ca\") pod \"etcd-operator-b45778765-c2fjv\" (UID: \"5d5134e9-c9b5-4959-8092-581e357e5ebf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c2fjv" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.764301 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cdd0449c-7006-492f-90ff-5c7962dbe6f9-trusted-ca\") pod \"ingress-operator-5b745b69d9-s84zq\" (UID: \"cdd0449c-7006-492f-90ff-5c7962dbe6f9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-s84zq" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.765143 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64f909e9-b00b-427e-9fe8-3ebf290d7676-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-dhhtj\" (UID: \"64f909e9-b00b-427e-9fe8-3ebf290d7676\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dhhtj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.765736 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/88cf9601-400d-4dd7-91dd-c2ba110f1731-config\") pod \"service-ca-operator-777779d784-fzjz7\" (UID: \"88cf9601-400d-4dd7-91dd-c2ba110f1731\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fzjz7" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.766223 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d5134e9-c9b5-4959-8092-581e357e5ebf-serving-cert\") pod \"etcd-operator-b45778765-c2fjv\" (UID: \"5d5134e9-c9b5-4959-8092-581e357e5ebf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c2fjv" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.766448 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3bffe14a-0216-4854-b0fc-7c482a297b82-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-s96vn\" (UID: \"3bffe14a-0216-4854-b0fc-7c482a297b82\") " pod="openshift-marketplace/marketplace-operator-79b997595-s96vn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.766940 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d5134e9-c9b5-4959-8092-581e357e5ebf-config\") pod \"etcd-operator-b45778765-c2fjv\" (UID: \"5d5134e9-c9b5-4959-8092-581e357e5ebf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c2fjv" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.767468 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3bffe14a-0216-4854-b0fc-7c482a297b82-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-s96vn\" (UID: \"3bffe14a-0216-4854-b0fc-7c482a297b82\") " pod="openshift-marketplace/marketplace-operator-79b997595-s96vn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.768028 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/76adb7c0-2e4a-46a7-ac36-9d8eba267cab-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-27vx7\" (UID: \"76adb7c0-2e4a-46a7-ac36-9d8eba267cab\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-27vx7" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.768920 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4fa1c8c4-4ea6-484c-906a-6e7c8016757b-config-volume\") pod \"collect-profiles-29500485-q5vqj\" (UID: \"4fa1c8c4-4ea6-484c-906a-6e7c8016757b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500485-q5vqj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.769794 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f5e8635f-8114-4ba8-b74c-6dd667ef41df-service-ca-bundle\") pod \"router-default-5444994796-cqh2x\" (UID: \"f5e8635f-8114-4ba8-b74c-6dd667ef41df\") " pod="openshift-ingress/router-default-5444994796-cqh2x" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.772542 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/5d5134e9-c9b5-4959-8092-581e357e5ebf-etcd-service-ca\") pod \"etcd-operator-b45778765-c2fjv\" (UID: \"5d5134e9-c9b5-4959-8092-581e357e5ebf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c2fjv" Feb 02 10:55:48 crc 
kubenswrapper[4838]: E0202 10:55:48.772816 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:49.272795481 +0000 UTC m=+143.609896629 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.773137 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/96f87e1d-9edf-44a3-b884-789dd6a3f334-csi-data-dir\") pod \"csi-hostpathplugin-zzq5h\" (UID: \"96f87e1d-9edf-44a3-b884-789dd6a3f334\") " pod="hostpath-provisioner/csi-hostpathplugin-zzq5h" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.773305 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/96f87e1d-9edf-44a3-b884-789dd6a3f334-plugins-dir\") pod \"csi-hostpathplugin-zzq5h\" (UID: \"96f87e1d-9edf-44a3-b884-789dd6a3f334\") " pod="hostpath-provisioner/csi-hostpathplugin-zzq5h" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.773790 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6385779-f490-4523-8a93-5c043fb29f56-config\") pod \"kube-controller-manager-operator-78b949d7b-gxndj\" (UID: \"b6385779-f490-4523-8a93-5c043fb29f56\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gxndj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.774156 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/99d83385-4586-40c5-af02-d293febdffc2-images\") pod \"machine-config-operator-74547568cd-fcmcn\" (UID: \"99d83385-4586-40c5-af02-d293febdffc2\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fcmcn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.774460 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f245ed23-8fbf-49f9-a284-6580d1025ceb-config\") pod \"kube-apiserver-operator-766d6c64bb-9ssxs\" (UID: \"f245ed23-8fbf-49f9-a284-6580d1025ceb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9ssxs" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.774754 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f245ed23-8fbf-49f9-a284-6580d1025ceb-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-9ssxs\" (UID: \"f245ed23-8fbf-49f9-a284-6580d1025ceb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9ssxs" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.774779 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/96f87e1d-9edf-44a3-b884-789dd6a3f334-registration-dir\") pod \"csi-hostpathplugin-zzq5h\" (UID: 
\"96f87e1d-9edf-44a3-b884-789dd6a3f334\") " pod="hostpath-provisioner/csi-hostpathplugin-zzq5h" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.775400 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/96f87e1d-9edf-44a3-b884-789dd6a3f334-mountpoint-dir\") pod \"csi-hostpathplugin-zzq5h\" (UID: \"96f87e1d-9edf-44a3-b884-789dd6a3f334\") " pod="hostpath-provisioner/csi-hostpathplugin-zzq5h" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.775510 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/97092ddc-d56d-49b0-b290-30f113972b43-signing-cabundle\") pod \"service-ca-9c57cc56f-nfzt8\" (UID: \"97092ddc-d56d-49b0-b290-30f113972b43\") " pod="openshift-service-ca/service-ca-9c57cc56f-nfzt8" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.775703 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7dc10e03-1a96-49b1-b3d4-2126af30e87b-config-volume\") pod \"dns-default-sz4zx\" (UID: \"7dc10e03-1a96-49b1-b3d4-2126af30e87b\") " pod="openshift-dns/dns-default-sz4zx" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.775916 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/99d83385-4586-40c5-af02-d293febdffc2-auth-proxy-config\") pod \"machine-config-operator-74547568cd-fcmcn\" (UID: \"99d83385-4586-40c5-af02-d293febdffc2\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fcmcn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.777258 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h2pj9" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.784658 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/943383ae-0bb8-44ef-91c8-25064ea7907f-tmpfs\") pod \"packageserver-d55dfcdfc-zdtxg\" (UID: \"943383ae-0bb8-44ef-91c8-25064ea7907f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zdtxg" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.787265 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cd169950-cc5a-4c99-ae5f-9258e0164f7a-cert\") pod \"ingress-canary-5pw6k\" (UID: \"cd169950-cc5a-4c99-ae5f-9258e0164f7a\") " pod="openshift-ingress-canary/ingress-canary-5pw6k" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.787873 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/97092ddc-d56d-49b0-b290-30f113972b43-signing-key\") pod \"service-ca-9c57cc56f-nfzt8\" (UID: \"97092ddc-d56d-49b0-b290-30f113972b43\") " pod="openshift-service-ca/service-ca-9c57cc56f-nfzt8" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.788775 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/9cc33e8c-fb5d-4006-be95-170ec3d739e9-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-p64qh\" (UID: \"9cc33e8c-fb5d-4006-be95-170ec3d739e9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p64qh" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.789871 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5d5134e9-c9b5-4959-8092-581e357e5ebf-etcd-client\") pod \"etcd-operator-b45778765-c2fjv\" (UID: \"5d5134e9-c9b5-4959-8092-581e357e5ebf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c2fjv" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.794138 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/943383ae-0bb8-44ef-91c8-25064ea7907f-apiservice-cert\") pod \"packageserver-d55dfcdfc-zdtxg\" (UID: \"943383ae-0bb8-44ef-91c8-25064ea7907f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zdtxg" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.799750 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-4746f"] Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.801341 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0adb948a-923d-44f9-8cad-f36fe04a90b2-bound-sa-token\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.809377 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f27a4923-82be-471e-8b65-a3c325f389bc-srv-cert\") pod \"catalog-operator-68c6474976-bdxgd\" (UID: \"f27a4923-82be-471e-8b65-a3c325f389bc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdxgd" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.811214 4838 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/f5e8635f-8114-4ba8-b74c-6dd667ef41df-default-certificate\") pod \"router-default-5444994796-cqh2x\" (UID: \"f5e8635f-8114-4ba8-b74c-6dd667ef41df\") " pod="openshift-ingress/router-default-5444994796-cqh2x" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.811265 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f5e8635f-8114-4ba8-b74c-6dd667ef41df-metrics-certs\") pod \"router-default-5444994796-cqh2x\" (UID: \"f5e8635f-8114-4ba8-b74c-6dd667ef41df\") " pod="openshift-ingress/router-default-5444994796-cqh2x" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.811555 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64f909e9-b00b-427e-9fe8-3ebf290d7676-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-dhhtj\" (UID: \"64f909e9-b00b-427e-9fe8-3ebf290d7676\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dhhtj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.812302 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/cdd0449c-7006-492f-90ff-5c7962dbe6f9-metrics-tls\") pod \"ingress-operator-5b745b69d9-s84zq\" (UID: \"cdd0449c-7006-492f-90ff-5c7962dbe6f9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-s84zq" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.821452 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-l9qxf"] Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.825197 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/88cf9601-400d-4dd7-91dd-c2ba110f1731-serving-cert\") pod \"service-ca-operator-777779d784-fzjz7\" (UID: \"88cf9601-400d-4dd7-91dd-c2ba110f1731\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fzjz7" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.826068 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7dc10e03-1a96-49b1-b3d4-2126af30e87b-metrics-tls\") pod \"dns-default-sz4zx\" (UID: \"7dc10e03-1a96-49b1-b3d4-2126af30e87b\") " pod="openshift-dns/dns-default-sz4zx" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.827132 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-t2zxd"] Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.829183 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/99d83385-4586-40c5-af02-d293febdffc2-proxy-tls\") pod \"machine-config-operator-74547568cd-fcmcn\" (UID: \"99d83385-4586-40c5-af02-d293febdffc2\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fcmcn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.829215 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/943383ae-0bb8-44ef-91c8-25064ea7907f-webhook-cert\") pod \"packageserver-d55dfcdfc-zdtxg\" (UID: \"943383ae-0bb8-44ef-91c8-25064ea7907f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zdtxg" Feb 
02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.829253 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f27a4923-82be-471e-8b65-a3c325f389bc-profile-collector-cert\") pod \"catalog-operator-68c6474976-bdxgd\" (UID: \"f27a4923-82be-471e-8b65-a3c325f389bc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdxgd" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.829341 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7c1d42f1-afc2-499f-9f51-aae97e6cc10a-profile-collector-cert\") pod \"olm-operator-6b444d44fb-vkdls\" (UID: \"7c1d42f1-afc2-499f-9f51-aae97e6cc10a\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vkdls" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.829732 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/93c46aec-8376-48e6-9b7c-fd0ef58bfd91-certs\") pod \"machine-config-server-s7pcn\" (UID: \"93c46aec-8376-48e6-9b7c-fd0ef58bfd91\") " pod="openshift-machine-config-operator/machine-config-server-s7pcn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.829830 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/f5e8635f-8114-4ba8-b74c-6dd667ef41df-stats-auth\") pod \"router-default-5444994796-cqh2x\" (UID: \"f5e8635f-8114-4ba8-b74c-6dd667ef41df\") " pod="openshift-ingress/router-default-5444994796-cqh2x" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.830269 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4fa1c8c4-4ea6-484c-906a-6e7c8016757b-secret-volume\") pod \"collect-profiles-29500485-q5vqj\" (UID: \"4fa1c8c4-4ea6-484c-906a-6e7c8016757b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500485-q5vqj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.830366 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9d7w\" (UniqueName: \"kubernetes.io/projected/3e879c3f-8c95-449b-b9e7-439c78f48209-kube-api-access-v9d7w\") pod \"console-f9d7485db-2qsm5\" (UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.830472 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7c1d42f1-afc2-499f-9f51-aae97e6cc10a-srv-cert\") pod \"olm-operator-6b444d44fb-vkdls\" (UID: \"7c1d42f1-afc2-499f-9f51-aae97e6cc10a\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vkdls" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.830790 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzzk9\" (UniqueName: \"kubernetes.io/projected/0adb948a-923d-44f9-8cad-f36fe04a90b2-kube-api-access-hzzk9\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.832673 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b6385779-f490-4523-8a93-5c043fb29f56-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-gxndj\" (UID: 
\"b6385779-f490-4523-8a93-5c043fb29f56\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gxndj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.834835 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/93c46aec-8376-48e6-9b7c-fd0ef58bfd91-node-bootstrap-token\") pod \"machine-config-server-s7pcn\" (UID: \"93c46aec-8376-48e6-9b7c-fd0ef58bfd91\") " pod="openshift-machine-config-operator/machine-config-server-s7pcn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.841538 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g"] Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.843474 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c08401-a807-46f6-9aca-26e535cabae5-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-7bcl9\" (UID: \"49c08401-a807-46f6-9aca-26e535cabae5\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7bcl9" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.851667 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztrjl\" (UniqueName: \"kubernetes.io/projected/76adb7c0-2e4a-46a7-ac36-9d8eba267cab-kube-api-access-ztrjl\") pod \"cluster-image-registry-operator-dc59b4c8b-27vx7\" (UID: \"76adb7c0-2e4a-46a7-ac36-9d8eba267cab\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-27vx7" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.863734 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:48 crc kubenswrapper[4838]: E0202 10:55:48.864344 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:49.364329444 +0000 UTC m=+143.701430472 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.885146 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjdft\" (UniqueName: \"kubernetes.io/projected/64f909e9-b00b-427e-9fe8-3ebf290d7676-kube-api-access-sjdft\") pod \"kube-storage-version-migrator-operator-b67b599dd-dhhtj\" (UID: \"64f909e9-b00b-427e-9fe8-3ebf290d7676\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dhhtj" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.896105 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-2l7rj"] Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.914765 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxfq8\" (UniqueName: \"kubernetes.io/projected/f27a4923-82be-471e-8b65-a3c325f389bc-kube-api-access-cxfq8\") pod \"catalog-operator-68c6474976-bdxgd\" (UID: \"f27a4923-82be-471e-8b65-a3c325f389bc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdxgd" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.923724 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9l8bc\" (UniqueName: \"kubernetes.io/projected/cdd0449c-7006-492f-90ff-5c7962dbe6f9-kube-api-access-9l8bc\") pod \"ingress-operator-5b745b69d9-s84zq\" (UID: \"cdd0449c-7006-492f-90ff-5c7962dbe6f9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-s84zq" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.956211 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6d8v\" (UniqueName: \"kubernetes.io/projected/cd169950-cc5a-4c99-ae5f-9258e0164f7a-kube-api-access-c6d8v\") pod \"ingress-canary-5pw6k\" (UID: \"cd169950-cc5a-4c99-ae5f-9258e0164f7a\") " pod="openshift-ingress-canary/ingress-canary-5pw6k" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.965663 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:48 crc kubenswrapper[4838]: E0202 10:55:48.966068 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:49.466056015 +0000 UTC m=+143.803157043 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.967182 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n78p9\" (UniqueName: \"kubernetes.io/projected/3bffe14a-0216-4854-b0fc-7c482a297b82-kube-api-access-n78p9\") pod \"marketplace-operator-79b997595-s96vn\" (UID: \"3bffe14a-0216-4854-b0fc-7c482a297b82\") " pod="openshift-marketplace/marketplace-operator-79b997595-s96vn" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.974179 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-s7xdh" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.988086 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmstl\" (UniqueName: \"kubernetes.io/projected/88cf9601-400d-4dd7-91dd-c2ba110f1731-kube-api-access-jmstl\") pod \"service-ca-operator-777779d784-fzjz7\" (UID: \"88cf9601-400d-4dd7-91dd-c2ba110f1731\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fzjz7" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.989965 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:55:48 crc kubenswrapper[4838]: I0202 10:55:48.997237 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-mzp9s" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.003707 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cdd0449c-7006-492f-90ff-5c7962dbe6f9-bound-sa-token\") pod \"ingress-operator-5b745b69d9-s84zq\" (UID: \"cdd0449c-7006-492f-90ff-5c7962dbe6f9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-s84zq" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.007652 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.027302 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bs69m\" (UniqueName: \"kubernetes.io/projected/96f87e1d-9edf-44a3-b884-789dd6a3f334-kube-api-access-bs69m\") pod \"csi-hostpathplugin-zzq5h\" (UID: \"96f87e1d-9edf-44a3-b884-789dd6a3f334\") " pod="hostpath-provisioner/csi-hostpathplugin-zzq5h" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.030152 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-5pw6k" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.046202 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2smwm\" (UniqueName: \"kubernetes.io/projected/943383ae-0bb8-44ef-91c8-25064ea7907f-kube-api-access-2smwm\") pod \"packageserver-d55dfcdfc-zdtxg\" (UID: \"943383ae-0bb8-44ef-91c8-25064ea7907f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zdtxg" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.066327 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:49 crc kubenswrapper[4838]: E0202 10:55:49.067942 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:49.567921651 +0000 UTC m=+143.905022679 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.070179 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwmmk\" (UniqueName: \"kubernetes.io/projected/97092ddc-d56d-49b0-b290-30f113972b43-kube-api-access-hwmmk\") pod \"service-ca-9c57cc56f-nfzt8\" (UID: \"97092ddc-d56d-49b0-b290-30f113972b43\") " pod="openshift-service-ca/service-ca-9c57cc56f-nfzt8" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.083192 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pz8ft\" (UniqueName: \"kubernetes.io/projected/7dc10e03-1a96-49b1-b3d4-2126af30e87b-kube-api-access-pz8ft\") pod \"dns-default-sz4zx\" (UID: \"7dc10e03-1a96-49b1-b3d4-2126af30e87b\") " pod="openshift-dns/dns-default-sz4zx" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.099277 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-27vx7" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.100972 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h2pj9"] Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.101208 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dhhtj" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.106230 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g25l9\" (UniqueName: \"kubernetes.io/projected/93c46aec-8376-48e6-9b7c-fd0ef58bfd91-kube-api-access-g25l9\") pod \"machine-config-server-s7pcn\" (UID: \"93c46aec-8376-48e6-9b7c-fd0ef58bfd91\") " pod="openshift-machine-config-operator/machine-config-server-s7pcn" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.124014 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbxvc\" (UniqueName: \"kubernetes.io/projected/9cc33e8c-fb5d-4006-be95-170ec3d739e9-kube-api-access-bbxvc\") pod \"multus-admission-controller-857f4d67dd-p64qh\" (UID: \"9cc33e8c-fb5d-4006-be95-170ec3d739e9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p64qh" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.130263 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-p64qh" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.137533 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-fzjz7" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.142436 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdxgd" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.143070 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pj2f\" (UniqueName: \"kubernetes.io/projected/65691c3c-b0b5-4460-a1d5-f64043ce7122-kube-api-access-7pj2f\") pod \"migrator-59844c95c7-5fhpx\" (UID: \"65691c3c-b0b5-4460-a1d5-f64043ce7122\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5fhpx" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.143330 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-j52s8"] Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.156507 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-s84zq" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.163981 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9z49x\" (UniqueName: \"kubernetes.io/projected/5d5134e9-c9b5-4959-8092-581e357e5ebf-kube-api-access-9z49x\") pod \"etcd-operator-b45778765-c2fjv\" (UID: \"5d5134e9-c9b5-4959-8092-581e357e5ebf\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c2fjv" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.165854 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-c2fjv" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.168704 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:49 crc kubenswrapper[4838]: E0202 10:55:49.177843 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:49.677826612 +0000 UTC m=+144.014927640 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.181585 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ts5pt\" (UniqueName: \"kubernetes.io/projected/99d83385-4586-40c5-af02-d293febdffc2-kube-api-access-ts5pt\") pod \"machine-config-operator-74547568cd-fcmcn\" (UID: \"99d83385-4586-40c5-af02-d293febdffc2\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fcmcn" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.205887 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5fhpx" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.212288 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9w4b\" (UniqueName: \"kubernetes.io/projected/7c1d42f1-afc2-499f-9f51-aae97e6cc10a-kube-api-access-m9w4b\") pod \"olm-operator-6b444d44fb-vkdls\" (UID: \"7c1d42f1-afc2-499f-9f51-aae97e6cc10a\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vkdls" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.223062 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fcmcn" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.228189 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f245ed23-8fbf-49f9-a284-6580d1025ceb-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-9ssxs\" (UID: \"f245ed23-8fbf-49f9-a284-6580d1025ceb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9ssxs" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.237374 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-nfzt8" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.241969 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2l5n9\" (UniqueName: \"kubernetes.io/projected/4fa1c8c4-4ea6-484c-906a-6e7c8016757b-kube-api-access-2l5n9\") pod \"collect-profiles-29500485-q5vqj\" (UID: \"4fa1c8c4-4ea6-484c-906a-6e7c8016757b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500485-q5vqj" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.251059 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vkdls" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.254549 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-dtdwq"] Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.258300 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-s96vn" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.270737 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.271287 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zdtxg" Feb 02 10:55:49 crc kubenswrapper[4838]: E0202 10:55:49.271466 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:49.771442406 +0000 UTC m=+144.108543434 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.271967 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:49 crc kubenswrapper[4838]: E0202 10:55:49.272260 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:49.77225153 +0000 UTC m=+144.109352558 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.281718 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvzpq\" (UniqueName: \"kubernetes.io/projected/49c08401-a807-46f6-9aca-26e535cabae5-kube-api-access-xvzpq\") pod \"package-server-manager-789f6589d5-7bcl9\" (UID: \"49c08401-a807-46f6-9aca-26e535cabae5\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7bcl9" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.285707 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v86mv\" (UniqueName: \"kubernetes.io/projected/f5e8635f-8114-4ba8-b74c-6dd667ef41df-kube-api-access-v86mv\") pod \"router-default-5444994796-cqh2x\" (UID: \"f5e8635f-8114-4ba8-b74c-6dd667ef41df\") " pod="openshift-ingress/router-default-5444994796-cqh2x" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.303806 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b6385779-f490-4523-8a93-5c043fb29f56-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-gxndj\" (UID: \"b6385779-f490-4523-8a93-5c043fb29f56\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gxndj" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.304076 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-zzq5h" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.317952 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-s7pcn" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.323454 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-s7xdh"] Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.336714 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-sz4zx" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.340254 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-j52s8" event={"ID":"37fe135e-daf4-4d19-9ca6-a33ca7174222","Type":"ContainerStarted","Data":"1388dca5d8322ea5dd5edb34ec345735a1ef0459df6b5236f1b6740431848c63"} Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.361659 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" event={"ID":"4186f5d8-e330-4fae-943e-a6abbdb49b96","Type":"ContainerStarted","Data":"b6b94727c07dd011f7d4b486577bb84341aefe63d3ac848d3413a794de889e98"} Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.367586 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" event={"ID":"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37","Type":"ContainerStarted","Data":"489befa8799461aa0ce285403b791a2f9ec4b0d1f12d04349d14b9bd047b578f"} Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.371306 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-l9qxf" event={"ID":"bc3ee51b-3741-4106-9785-3d2b572ee205","Type":"ContainerStarted","Data":"0a726bdbaf19cc333c692439dd921323470f322ef1d67782d0b9582f3b663c7f"} Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.372747 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:49 crc kubenswrapper[4838]: E0202 10:55:49.373424 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:49.873378624 +0000 UTC m=+144.210479702 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.374018 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-t2zxd" event={"ID":"d74d39da-6fcf-437c-805b-ec416d09e348","Type":"ContainerStarted","Data":"465361fcbe4e6ef930b1a6e4d0fbd438034a971220fd2833cf41326ddf1253ec"} Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.374046 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-t2zxd" event={"ID":"d74d39da-6fcf-437c-805b-ec416d09e348","Type":"ContainerStarted","Data":"d4e366719a4464a49f6dd2a8849c4845c7ca301218f211276638332d02dad605"} Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.403382 4838 generic.go:334] "Generic (PLEG): container finished" podID="03d2fa95-b476-4259-8d2d-69bd31c28da4" containerID="cb39e09671ba719fa01ec5d1dc1dad667ed6fee77530bf9a58dd99b4984f7b9c" exitCode=0 Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.404639 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dmwvh" event={"ID":"03d2fa95-b476-4259-8d2d-69bd31c28da4","Type":"ContainerDied","Data":"cb39e09671ba719fa01ec5d1dc1dad667ed6fee77530bf9a58dd99b4984f7b9c"} Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.404674 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dmwvh" event={"ID":"03d2fa95-b476-4259-8d2d-69bd31c28da4","Type":"ContainerStarted","Data":"46272fc5b3b724602387508905dae55e95044b7b5ae2d91623ffe2b3bf4f63d5"} Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.427153 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hxwxd" event={"ID":"634059df-4860-4872-9aae-3b71aa2d55b2","Type":"ContainerStarted","Data":"14e6b20691f2f92a33a17c17020eb8bf6281714ecc8f2fec2ff698dd6cba6f66"} Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.427210 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hxwxd" event={"ID":"634059df-4860-4872-9aae-3b71aa2d55b2","Type":"ContainerStarted","Data":"d13316ef5f4a2b604714a1a366b7ac21583a98615a16b439ac8ebfd03f8b40f8"} Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.451274 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-cqh2x" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.453776 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4746f" event={"ID":"6e542c14-1264-46b8-92ab-bc74484549bf","Type":"ContainerStarted","Data":"ee900fd0a364cefe16092c393db8362e148928313e5df44e330ce33964a737c1"} Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.453813 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4746f" event={"ID":"6e542c14-1264-46b8-92ab-bc74484549bf","Type":"ContainerStarted","Data":"3f34f748dc4b02f77dbc8365b9f41dd65df06b69f9c13e528d423069df0f7de1"} Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.466941 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" event={"ID":"db54cce9-ff9d-4772-abf3-01f15ecb8075","Type":"ContainerStarted","Data":"37b6c8c5c9804f2ddcfcf2a242353706afa270f96a916bdf65e4253961bc63cb"} Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.466994 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" event={"ID":"db54cce9-ff9d-4772-abf3-01f15ecb8075","Type":"ContainerStarted","Data":"2a9efacab3eb58e6c1129fd92b58c41f9202f07dcb955d4e14bf300156f4bb3f"} Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.467900 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.472453 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h2pj9" event={"ID":"d24e9722-41b4-4e18-9e40-204ac7e1a67e","Type":"ContainerStarted","Data":"3feb2f91b43ff0ef4f578c3f35e5a5af1ad894238a819185af04e5303e3bc8a9"} Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.474925 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:49 crc kubenswrapper[4838]: E0202 10:55:49.475197 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:49.975186438 +0000 UTC m=+144.312287466 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.483587 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gxndj" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.483659 4838 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-w48jn container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.483704 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" podUID="db54cce9-ff9d-4772-abf3-01f15ecb8075" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.484241 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9ssxs" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.484889 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5nmhg" event={"ID":"eb0d3aa3-09b5-4b68-833d-03218e1794f0","Type":"ContainerStarted","Data":"17a3e2f2dc8690db8c95efca185a6bdc123882bf6b6a5fe39c7975fe60345633"} Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.484987 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5nmhg" event={"ID":"eb0d3aa3-09b5-4b68-833d-03218e1794f0","Type":"ContainerStarted","Data":"1180241841c15cb80048e4d8c295bbc5ba87f9af56cd971c68d1c29eef3ca615"} Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.493053 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" event={"ID":"92924f81-e588-47e1-84d1-766c9774f6d1","Type":"ContainerStarted","Data":"ff4739a8a737120517fdd5fdfee34e7b4f5b672dd3fa24c74ac417bdb3ea9476"} Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.493096 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" event={"ID":"92924f81-e588-47e1-84d1-766c9774f6d1","Type":"ContainerStarted","Data":"d313cf6afd43dd1de7c3c3dfc38b0bb2788b5f5836deb354943afe4e8538d1de"} Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.493651 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.501488 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7bcl9" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.502002 4838 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-z6khj container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.502028 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" podUID="92924f81-e588-47e1-84d1-766c9774f6d1" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.502153 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh59q" event={"ID":"5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa","Type":"ContainerStarted","Data":"b9728a93bc752d3a1f5a707ca373755af53d011b886b6c3f444a1e7445f2dc7c"} Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.502197 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh59q" event={"ID":"5dfe17d8-ed0b-4308-a9c5-8abf6e5371fa","Type":"ContainerStarted","Data":"07666a48e54dc767469e32cb4a2b1b9e4ca350fad110c284fee4ccd40668608c"} Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.504830 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-v475b" event={"ID":"b49e2d2f-5155-49bf-82f3-b68992ebe787","Type":"ContainerStarted","Data":"f6feb19ddff44a8117e56058ec23e7ab9947d8a3f3bfc1ef722343266b0e3e84"} Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.504876 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-v475b" event={"ID":"b49e2d2f-5155-49bf-82f3-b68992ebe787","Type":"ContainerStarted","Data":"66f833ec2771f390fc3bb14a5952821f8d6b026e04f22d4a15d1a95a50d04581"} Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.504889 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-v475b" event={"ID":"b49e2d2f-5155-49bf-82f3-b68992ebe787","Type":"ContainerStarted","Data":"a755a77df128af579719e8d024bc93c48454ca96b7bfd292ed46cd0b1731ca30"} Feb 02 10:55:49 crc kubenswrapper[4838]: W0202 10:55:49.526566 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod93c46aec_8376_48e6_9b7c_fd0ef58bfd91.slice/crio-c2eb840e9fe69f893af67d1a081c09c941b1526c53a760f6f6d9e652425333c8 WatchSource:0}: Error finding container c2eb840e9fe69f893af67d1a081c09c941b1526c53a760f6f6d9e652425333c8: Status 404 returned error can't find the container with id c2eb840e9fe69f893af67d1a081c09c941b1526c53a760f6f6d9e652425333c8 Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.531881 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500485-q5vqj" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.576410 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:49 crc kubenswrapper[4838]: E0202 10:55:49.576585 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:50.076557969 +0000 UTC m=+144.413658997 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.577403 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:49 crc kubenswrapper[4838]: E0202 10:55:49.579321 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:50.07930864 +0000 UTC m=+144.416409668 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.591659 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-zfr2j"] Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.591702 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-mzp9s"] Feb 02 10:55:49 crc kubenswrapper[4838]: W0202 10:55:49.646879 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poded8b29bf_e322_4cc9_b027_0aea680ce349.slice/crio-1f751eb54e2967cb8ced9b013b2208ad442780a0f39656def155f26b0eb97958 WatchSource:0}: Error finding container 1f751eb54e2967cb8ced9b013b2208ad442780a0f39656def155f26b0eb97958: Status 404 returned error can't find the container with id 1f751eb54e2967cb8ced9b013b2208ad442780a0f39656def155f26b0eb97958 Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.678678 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:49 crc kubenswrapper[4838]: E0202 10:55:49.678974 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:50.1789346 +0000 UTC m=+144.516035628 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.679146 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:49 crc kubenswrapper[4838]: E0202 10:55:49.679467 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:50.179460485 +0000 UTC m=+144.516561513 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.736154 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-2qsm5"] Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.750687 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dhhtj"] Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.780168 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:49 crc kubenswrapper[4838]: E0202 10:55:49.780491 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:50.280476146 +0000 UTC m=+144.617577174 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.846938 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-t2zxd" podStartSLOduration=119.846921414 podStartE2EDuration="1m59.846921414s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:49.846311876 +0000 UTC m=+144.183412924" watchObservedRunningTime="2026-02-02 10:55:49.846921414 +0000 UTC m=+144.184022452" Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.864084 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-5pw6k"] Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.889532 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:49 crc kubenswrapper[4838]: E0202 10:55:49.889906 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: 
nodeName:}" failed. No retries permitted until 2026-02-02 10:55:50.389894363 +0000 UTC m=+144.726995391 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.904783 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-fzjz7"] Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.907302 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdxgd"] Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.907346 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-5fhpx"] Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.932788 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-27vx7"] Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.934069 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-p64qh"] Feb 02 10:55:49 crc kubenswrapper[4838]: I0202 10:55:49.991199 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:49 crc kubenswrapper[4838]: E0202 10:55:49.991518 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:50.491500371 +0000 UTC m=+144.828601399 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:50 crc kubenswrapper[4838]: W0202 10:55:50.028469 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcd169950_cc5a_4c99_ae5f_9258e0164f7a.slice/crio-7146bb7834742abf3ffd6b4a50121dfbbd483c7d67266cf307df1abebb28ee07 WatchSource:0}: Error finding container 7146bb7834742abf3ffd6b4a50121dfbbd483c7d67266cf307df1abebb28ee07: Status 404 returned error can't find the container with id 7146bb7834742abf3ffd6b4a50121dfbbd483c7d67266cf307df1abebb28ee07 Feb 02 10:55:50 crc kubenswrapper[4838]: W0202 10:55:50.047788 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod88cf9601_400d_4dd7_91dd_c2ba110f1731.slice/crio-be9908acf6c037340791e52ab11329dd4025a3b166c4dfee3d4e4d92bf65c998 WatchSource:0}: Error finding container be9908acf6c037340791e52ab11329dd4025a3b166c4dfee3d4e4d92bf65c998: Status 404 returned error can't find the container with id be9908acf6c037340791e52ab11329dd4025a3b166c4dfee3d4e4d92bf65c998 Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.091789 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" podStartSLOduration=119.09177116 podStartE2EDuration="1m59.09177116s" podCreationTimestamp="2026-02-02 10:53:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:50.086050553 +0000 UTC m=+144.423151581" watchObservedRunningTime="2026-02-02 10:55:50.09177116 +0000 UTC m=+144.428872188" Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.092849 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-s84zq"] Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.092854 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:50 crc kubenswrapper[4838]: E0202 10:55:50.093174 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:50.593159061 +0000 UTC m=+144.930260159 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.194092 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:50 crc kubenswrapper[4838]: E0202 10:55:50.194380 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:50.694356497 +0000 UTC m=+145.031457525 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.194467 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:50 crc kubenswrapper[4838]: E0202 10:55:50.194876 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:50.694849291 +0000 UTC m=+145.031950319 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:50 crc kubenswrapper[4838]: W0202 10:55:50.200027 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod76adb7c0_2e4a_46a7_ac36_9d8eba267cab.slice/crio-f63b272b103424e89a37f0caf21cbfd9590c8f74c965f93c832bd94808d43dfc WatchSource:0}: Error finding container f63b272b103424e89a37f0caf21cbfd9590c8f74c965f93c832bd94808d43dfc: Status 404 returned error can't find the container with id f63b272b103424e89a37f0caf21cbfd9590c8f74c965f93c832bd94808d43dfc Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.206461 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-c2fjv"] Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.234804 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vkdls"] Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.251177 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zdtxg"] Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.265294 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-fcmcn"] Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.265732 4838 csr.go:261] certificate signing request csr-m4fbd is approved, waiting to be issued Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.279259 4838 csr.go:257] certificate signing request csr-m4fbd is issued Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.295128 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:50 crc kubenswrapper[4838]: E0202 10:55:50.295275 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:50.795259675 +0000 UTC m=+145.132360703 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.317883 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vh59q" podStartSLOduration=120.317871627 podStartE2EDuration="2m0.317871627s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:50.316729404 +0000 UTC m=+144.653830452" watchObservedRunningTime="2026-02-02 10:55:50.317871627 +0000 UTC m=+144.654972655" Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.396589 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:50 crc kubenswrapper[4838]: E0202 10:55:50.397079 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:50.897063118 +0000 UTC m=+145.234164146 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.398139 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5nmhg" podStartSLOduration=120.398125549 podStartE2EDuration="2m0.398125549s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:50.397962895 +0000 UTC m=+144.735063933" watchObservedRunningTime="2026-02-02 10:55:50.398125549 +0000 UTC m=+144.735226577" Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.407020 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-zzq5h"] Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.418731 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500485-q5vqj"] Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.418766 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gxndj"] Feb 02 10:55:50 crc kubenswrapper[4838]: W0202 10:55:50.420352 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod99d83385_4586_40c5_af02_d293febdffc2.slice/crio-6bf740885ab86b08923957c916c1d3619790dce961dbb3c55a8fbc5d86c4296f WatchSource:0}: Error finding container 6bf740885ab86b08923957c916c1d3619790dce961dbb3c55a8fbc5d86c4296f: Status 404 returned error can't find the container with id 6bf740885ab86b08923957c916c1d3619790dce961dbb3c55a8fbc5d86c4296f Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.431783 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-sz4zx"] Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.433981 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-s96vn"] Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.448676 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7bcl9"] Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.449222 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" podStartSLOduration=120.449202887 podStartE2EDuration="2m0.449202887s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:50.443088027 +0000 UTC m=+144.780189065" watchObservedRunningTime="2026-02-02 10:55:50.449202887 +0000 UTC m=+144.786303915" Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.497353 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:50 crc kubenswrapper[4838]: E0202 10:55:50.497542 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:50.997519903 +0000 UTC m=+145.334620931 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.497710 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:50 crc kubenswrapper[4838]: E0202 10:55:50.498039 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:50.998030838 +0000 UTC m=+145.335131866 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.515262 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-s7pcn" event={"ID":"93c46aec-8376-48e6-9b7c-fd0ef58bfd91","Type":"ContainerStarted","Data":"c2eb840e9fe69f893af67d1a081c09c941b1526c53a760f6f6d9e652425333c8"} Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.516474 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fcmcn" event={"ID":"99d83385-4586-40c5-af02-d293febdffc2","Type":"ContainerStarted","Data":"6bf740885ab86b08923957c916c1d3619790dce961dbb3c55a8fbc5d86c4296f"} Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.516991 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9ssxs"] Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.517929 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dhhtj" event={"ID":"64f909e9-b00b-427e-9fe8-3ebf290d7676","Type":"ContainerStarted","Data":"d7ea2ed7a4fe31e98c78804186a670aeb8c2d51ddc34e34e4a0f94dd9644d4b7"} Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.529568 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-nfzt8"] Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.530898 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-27vx7" event={"ID":"76adb7c0-2e4a-46a7-ac36-9d8eba267cab","Type":"ContainerStarted","Data":"f63b272b103424e89a37f0caf21cbfd9590c8f74c965f93c832bd94808d43dfc"} Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.532193 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-mzp9s" event={"ID":"ed8b29bf-e322-4cc9-b027-0aea680ce349","Type":"ContainerStarted","Data":"1f751eb54e2967cb8ced9b013b2208ad442780a0f39656def155f26b0eb97958"} Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.533207 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-2qsm5" event={"ID":"3e879c3f-8c95-449b-b9e7-439c78f48209","Type":"ContainerStarted","Data":"3cc858cf1908eeb91710e318977d330c13069db13cfad51f3b7cb2281ef66c37"} Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.534096 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5fhpx" event={"ID":"65691c3c-b0b5-4460-a1d5-f64043ce7122","Type":"ContainerStarted","Data":"156208e966355f7b1b66fbdad11708b1b8e30f5d306b0f92b724bfee7f15b351"} Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.534834 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-c2fjv" 
event={"ID":"5d5134e9-c9b5-4959-8092-581e357e5ebf","Type":"ContainerStarted","Data":"f6468fcebf2f426f9f817d57749b2171c6ce9fc7e28bfa776e851a113ac9f71e"} Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.535562 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-5pw6k" event={"ID":"cd169950-cc5a-4c99-ae5f-9258e0164f7a","Type":"ContainerStarted","Data":"7146bb7834742abf3ffd6b4a50121dfbbd483c7d67266cf307df1abebb28ee07"} Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.536256 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" event={"ID":"b5aa31dc-369f-427e-97a4-b6245df5df4f","Type":"ContainerStarted","Data":"6b7ccc1ae01123d1ef083271116f4ce98bbf555d8b7e8bd51ea9c8a14932e683"} Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.536910 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-s84zq" event={"ID":"cdd0449c-7006-492f-90ff-5c7962dbe6f9","Type":"ContainerStarted","Data":"7b07607016d455ac86d19c35fbf96a504800834407dca55ea4a3b34563a2fafc"} Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.537518 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-s7xdh" event={"ID":"ec1b1633-82ef-481f-baac-8bb589265b21","Type":"ContainerStarted","Data":"868d7515563d9018254f01e83a2f1587210182b589815ee175d5a3bfe7550240"} Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.538660 4838 generic.go:334] "Generic (PLEG): container finished" podID="4186f5d8-e330-4fae-943e-a6abbdb49b96" containerID="d6c22f76f6b6d03b45a83be300fed1b71491f18fbdbfd5aa87851ae322b41505" exitCode=0 Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.538703 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" event={"ID":"4186f5d8-e330-4fae-943e-a6abbdb49b96","Type":"ContainerDied","Data":"d6c22f76f6b6d03b45a83be300fed1b71491f18fbdbfd5aa87851ae322b41505"} Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.540060 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdxgd" event={"ID":"f27a4923-82be-471e-8b65-a3c325f389bc","Type":"ContainerStarted","Data":"6d74c0af3940fcbfee30edcf877de6e240524888ce3f0da9b2df9060aeabec50"} Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.540736 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-cqh2x" event={"ID":"f5e8635f-8114-4ba8-b74c-6dd667ef41df","Type":"ContainerStarted","Data":"0a90d7be97821123c10ed7823239595e0f157908381b4460a9c6c0305d49ce55"} Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.541354 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zdtxg" event={"ID":"943383ae-0bb8-44ef-91c8-25064ea7907f","Type":"ContainerStarted","Data":"3da8d3c4941d0ba128166485e06bd691b79892dc13e66e51390cf886f55d1531"} Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.543949 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-l9qxf" event={"ID":"bc3ee51b-3741-4106-9785-3d2b572ee205","Type":"ContainerStarted","Data":"e421d34b913fad0d1a870358e2bd755535c4c2412abf94e63428702ceaaa5ef5"} Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.545175 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vkdls" event={"ID":"7c1d42f1-afc2-499f-9f51-aae97e6cc10a","Type":"ContainerStarted","Data":"cf5e26ea5b1185e41e814363388af7030df03abaa8064a7e1de58d9ebc48861c"} Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.546049 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-fzjz7" event={"ID":"88cf9601-400d-4dd7-91dd-c2ba110f1731","Type":"ContainerStarted","Data":"be9908acf6c037340791e52ab11329dd4025a3b166c4dfee3d4e4d92bf65c998"} Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.547426 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-j52s8" event={"ID":"37fe135e-daf4-4d19-9ca6-a33ca7174222","Type":"ContainerStarted","Data":"6c4ff28ef50f9601588c24541f2862612bd3abedb945baa80812c5a702635f19"} Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.548300 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-dtdwq" event={"ID":"228a6edf-f91c-4f0f-8098-98831284e76c","Type":"ContainerStarted","Data":"54b8be37ff21921902d324d6cb211af1ded29fd163b1d40b4608cf499219799b"} Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.549634 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" event={"ID":"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37","Type":"ContainerStarted","Data":"cc6694aa46b51203a530a155da47ab5bf4aabd583443aabe9c2a3d08fad7a04f"} Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.549885 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.550659 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-p64qh" event={"ID":"9cc33e8c-fb5d-4006-be95-170ec3d739e9","Type":"ContainerStarted","Data":"5468987a0150fc1eb977773ec9e6ffc1e80ffb4ca190954bed0849a0a77c9e84"} Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.552043 4838 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-2l7rj container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.14:6443/healthz\": dial tcp 10.217.0.14:6443: connect: connection refused" start-of-body= Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.552084 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" podUID="5a08fba0-5dfa-4dc5-8cf9-aa4580244e37" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.14:6443/healthz\": dial tcp 10.217.0.14:6443: connect: connection refused" Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.552265 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4746f" event={"ID":"6e542c14-1264-46b8-92ab-bc74484549bf","Type":"ContainerStarted","Data":"e1aca1b8345cdc470cb3b99dafe98f5f326e4c3e1c914607aad69673afab0dac"} Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.554806 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dmwvh" 
event={"ID":"03d2fa95-b476-4259-8d2d-69bd31c28da4","Type":"ContainerStarted","Data":"09e0d1a3aeda0191446c53412c68b59dffd7874a76fcb131cde4a31d5178d735"} Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.554939 4838 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-w48jn container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.555022 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" podUID="db54cce9-ff9d-4772-abf3-01f15ecb8075" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.555125 4838 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-z6khj container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.555168 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" podUID="92924f81-e588-47e1-84d1-766c9774f6d1" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.598726 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:50 crc kubenswrapper[4838]: E0202 10:55:50.600431 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:51.100412799 +0000 UTC m=+145.437513827 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.700907 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:50 crc kubenswrapper[4838]: E0202 10:55:50.701561 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:51.201549723 +0000 UTC m=+145.538650751 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.800703 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-v475b" podStartSLOduration=120.800687919 podStartE2EDuration="2m0.800687919s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:50.767164416 +0000 UTC m=+145.104265454" watchObservedRunningTime="2026-02-02 10:55:50.800687919 +0000 UTC m=+145.137788937" Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.802231 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hxwxd" podStartSLOduration=120.802225064 podStartE2EDuration="2m0.802225064s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:50.80038266 +0000 UTC m=+145.137483688" watchObservedRunningTime="2026-02-02 10:55:50.802225064 +0000 UTC m=+145.139326092" Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.802876 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:50 crc kubenswrapper[4838]: E0202 10:55:50.803289 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:51.303272734 +0000 UTC m=+145.640373762 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:50 crc kubenswrapper[4838]: I0202 10:55:50.906731 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:50 crc kubenswrapper[4838]: E0202 10:55:50.907112 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:51.407099438 +0000 UTC m=+145.744200466 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.013252 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:51 crc kubenswrapper[4838]: E0202 10:55:51.013524 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:51.513505566 +0000 UTC m=+145.850606584 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.013678 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:51 crc kubenswrapper[4838]: E0202 10:55:51.014030 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:51.514021921 +0000 UTC m=+145.851122949 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.114659 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:51 crc kubenswrapper[4838]: E0202 10:55:51.115096 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:51.615081174 +0000 UTC m=+145.952182202 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.126078 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" podStartSLOduration=121.126061715 podStartE2EDuration="2m1.126061715s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:51.125692085 +0000 UTC m=+145.462793123" watchObservedRunningTime="2026-02-02 10:55:51.126061715 +0000 UTC m=+145.463162743" Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.132179 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4746f" podStartSLOduration=121.132167263 podStartE2EDuration="2m1.132167263s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:51.100096344 +0000 UTC m=+145.437197392" watchObservedRunningTime="2026-02-02 10:55:51.132167263 +0000 UTC m=+145.469268291" Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.217327 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:51 crc kubenswrapper[4838]: E0202 10:55:51.217679 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:51.717667059 +0000 UTC m=+146.054768087 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.280975 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-02-02 10:50:50 +0000 UTC, rotation deadline is 2026-11-25 13:00:50.595375083 +0000 UTC Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.281301 4838 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 7106h4m59.314076289s for next certificate rotation Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.318581 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:51 crc kubenswrapper[4838]: E0202 10:55:51.318959 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:51.818943048 +0000 UTC m=+146.156044076 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.419554 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:51 crc kubenswrapper[4838]: E0202 10:55:51.420582 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:51.920565996 +0000 UTC m=+146.257667024 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.524339 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:51 crc kubenswrapper[4838]: E0202 10:55:51.524548 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:52.024522413 +0000 UTC m=+146.361623441 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.524921 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:51 crc kubenswrapper[4838]: E0202 10:55:51.525223 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:52.025195753 +0000 UTC m=+146.362296781 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.568694 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-zzq5h" event={"ID":"96f87e1d-9edf-44a3-b884-789dd6a3f334","Type":"ContainerStarted","Data":"b4f25603d6b647c218dca9bfd8b42c751e8825dcf1c70b764188f37daf9aa0ff"} Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.580420 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zdtxg" event={"ID":"943383ae-0bb8-44ef-91c8-25064ea7907f","Type":"ContainerStarted","Data":"794597b68203d265e87c1737a1b600fef7d897789e36dbbb017397e00369eb95"} Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.581465 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zdtxg" Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.589545 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gxndj" event={"ID":"b6385779-f490-4523-8a93-5c043fb29f56","Type":"ContainerStarted","Data":"b7c00563ab51e7ff20520a97644b8520f7a7d4dad2da3fc73924452045a5aa69"} Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.591268 4838 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-zdtxg container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.40:5443/healthz\": dial tcp 10.217.0.40:5443: connect: connection refused" start-of-body= Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.591300 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zdtxg" podUID="943383ae-0bb8-44ef-91c8-25064ea7907f" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.40:5443/healthz\": dial tcp 10.217.0.40:5443: connect: connection refused" Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.600166 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zdtxg" podStartSLOduration=121.60015235 podStartE2EDuration="2m1.60015235s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:51.598716148 +0000 UTC m=+145.935817176" watchObservedRunningTime="2026-02-02 10:55:51.60015235 +0000 UTC m=+145.937253378" Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.626116 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:51 crc kubenswrapper[4838]: E0202 10:55:51.627433 4838 nestedpendingoperations.go:348] Operation 
for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:52.127416109 +0000 UTC m=+146.464517137 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.649520 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h2pj9" event={"ID":"d24e9722-41b4-4e18-9e40-204ac7e1a67e","Type":"ContainerStarted","Data":"be0c765a18ac231b7cd127780a812af2bf74df0f1d732266f7942bffc8ec7850"} Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.664785 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-27vx7" event={"ID":"76adb7c0-2e4a-46a7-ac36-9d8eba267cab","Type":"ContainerStarted","Data":"2d2bf882846eb813b8bc19b463a9675236c89f7ba511bf1913764920679b736f"} Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.698164 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-5pw6k" event={"ID":"cd169950-cc5a-4c99-ae5f-9258e0164f7a","Type":"ContainerStarted","Data":"4c4442645e2d552ddee124436b87a340341e165444f203f7643abe5506822494"} Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.708235 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dhhtj" event={"ID":"64f909e9-b00b-427e-9fe8-3ebf290d7676","Type":"ContainerStarted","Data":"8ae304337b7d8d26ad79ad0e9bbd83fda314308682dbdbe81bf4520d0bd1e17e"} Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.710737 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h2pj9" podStartSLOduration=121.710715691 podStartE2EDuration="2m1.710715691s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:51.700497411 +0000 UTC m=+146.037598439" watchObservedRunningTime="2026-02-02 10:55:51.710715691 +0000 UTC m=+146.047816709" Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.729026 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:51 crc kubenswrapper[4838]: E0202 10:55:51.730902 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-02-02 10:55:52.230886882 +0000 UTC m=+146.567987910 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.737004 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-s96vn" event={"ID":"3bffe14a-0216-4854-b0fc-7c482a297b82","Type":"ContainerStarted","Data":"68c830bee30a244b7319819ac9568d97f6dfb2449661089c4f87cc471140577e"} Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.747196 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-27vx7" podStartSLOduration=121.747180149 podStartE2EDuration="2m1.747180149s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:51.74653143 +0000 UTC m=+146.083632458" watchObservedRunningTime="2026-02-02 10:55:51.747180149 +0000 UTC m=+146.084281177" Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.764056 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-s7xdh" event={"ID":"ec1b1633-82ef-481f-baac-8bb589265b21","Type":"ContainerStarted","Data":"6f0a837ad08c756fa1be457bff3b4f378d8281fac615b469ed3add94b27986ae"} Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.774904 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-5pw6k" podStartSLOduration=5.7748896720000005 podStartE2EDuration="5.774889672s" podCreationTimestamp="2026-02-02 10:55:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:51.770212154 +0000 UTC m=+146.107313182" watchObservedRunningTime="2026-02-02 10:55:51.774889672 +0000 UTC m=+146.111990700" Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.789154 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-nfzt8" event={"ID":"97092ddc-d56d-49b0-b290-30f113972b43","Type":"ContainerStarted","Data":"30477627f8fec4bdbb7ad2c53efd34e22e9f567894f660e953f8a58469a0b9fb"} Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.806756 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-s7xdh" podStartSLOduration=121.806741755 podStartE2EDuration="2m1.806741755s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:51.805313363 +0000 UTC m=+146.142414381" watchObservedRunningTime="2026-02-02 10:55:51.806741755 +0000 UTC m=+146.143842783" Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.824014 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-server-s7pcn" event={"ID":"93c46aec-8376-48e6-9b7c-fd0ef58bfd91","Type":"ContainerStarted","Data":"0f0c0eb7d0379f239c83ccf4a6ac559e83f4472388457523e957f44f06ceb459"} Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.836481 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:51 crc kubenswrapper[4838]: E0202 10:55:51.839224 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:52.339206257 +0000 UTC m=+146.676307285 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.845099 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-s84zq" event={"ID":"cdd0449c-7006-492f-90ff-5c7962dbe6f9","Type":"ContainerStarted","Data":"5e547377393433960942100f58ffbef2bbec0465c7eac3fe51f90afbe60ef895"} Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.857562 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-l9qxf" event={"ID":"bc3ee51b-3741-4106-9785-3d2b572ee205","Type":"ContainerStarted","Data":"9900807f389ea936bb454d6ea5944e80e7cc99b6ad720ea93ae151dd6f57f6ae"} Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.876752 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-p64qh" event={"ID":"9cc33e8c-fb5d-4006-be95-170ec3d739e9","Type":"ContainerStarted","Data":"51514dee4305c50f807e90af1054d5bcc6aa059f0bb7ed8582092d8b49a0f0f2"} Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.917345 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-dhhtj" podStartSLOduration=121.917325506 podStartE2EDuration="2m1.917325506s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:51.853579778 +0000 UTC m=+146.190680816" watchObservedRunningTime="2026-02-02 10:55:51.917325506 +0000 UTC m=+146.254426534" Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.917448 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-nfzt8" podStartSLOduration=120.91743986 podStartE2EDuration="2m0.91743986s" podCreationTimestamp="2026-02-02 10:53:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 
10:55:51.875534791 +0000 UTC m=+146.212635819" watchObservedRunningTime="2026-02-02 10:55:51.91743986 +0000 UTC m=+146.254540928" Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.927869 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-s7pcn" podStartSLOduration=5.927855775 podStartE2EDuration="5.927855775s" podCreationTimestamp="2026-02-02 10:55:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:51.926883727 +0000 UTC m=+146.263984765" watchObservedRunningTime="2026-02-02 10:55:51.927855775 +0000 UTC m=+146.264956803" Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.931381 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-fzjz7" event={"ID":"88cf9601-400d-4dd7-91dd-c2ba110f1731","Type":"ContainerStarted","Data":"d39861d5b893383a3fa55674d1760b4f05570acf1e8bdb90ec1b16ffa86ed394"} Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.932118 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdxgd" event={"ID":"f27a4923-82be-471e-8b65-a3c325f389bc","Type":"ContainerStarted","Data":"c7f0213d7e0c10fd4b62d7fc35354834375e93e3841b8a73ca79daacae6a5f28"} Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.932723 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdxgd" Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.938292 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:51 crc kubenswrapper[4838]: E0202 10:55:51.939472 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:52.439458615 +0000 UTC m=+146.776559643 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.948529 4838 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-bdxgd container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" start-of-body= Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.948583 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdxgd" podUID="f27a4923-82be-471e-8b65-a3c325f389bc" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.948829 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9ssxs" event={"ID":"f245ed23-8fbf-49f9-a284-6580d1025ceb","Type":"ContainerStarted","Data":"91e2fb2933b1475c98ae2591c2ab3811bf7b70de7eefd3185dcb45788feb7e78"} Feb 02 10:55:51 crc kubenswrapper[4838]: I0202 10:55:51.960050 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-l9qxf" podStartSLOduration=121.960031758 podStartE2EDuration="2m1.960031758s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:51.95940547 +0000 UTC m=+146.296506498" watchObservedRunningTime="2026-02-02 10:55:51.960031758 +0000 UTC m=+146.297132786" Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:51.987534 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-fzjz7" podStartSLOduration=120.987511234 podStartE2EDuration="2m0.987511234s" podCreationTimestamp="2026-02-02 10:53:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:51.984739512 +0000 UTC m=+146.321840540" watchObservedRunningTime="2026-02-02 10:55:51.987511234 +0000 UTC m=+146.324612252" Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:51.995283 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-mzp9s" event={"ID":"ed8b29bf-e322-4cc9-b027-0aea680ce349","Type":"ContainerStarted","Data":"48a3ad40d18edcac91230932fc80a8989fc697338feafbe33f45b0bdfb829c68"} Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.018883 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-mzp9s" Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.022820 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdxgd" podStartSLOduration=122.022804538 podStartE2EDuration="2m2.022804538s" 
podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:52.02185311 +0000 UTC m=+146.358954148" watchObservedRunningTime="2026-02-02 10:55:52.022804538 +0000 UTC m=+146.359905556" Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.034525 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500485-q5vqj" event={"ID":"4fa1c8c4-4ea6-484c-906a-6e7c8016757b","Type":"ContainerStarted","Data":"0c7187a3636eb3c3e489663f9051cf28d2d7e42f92ee96011b4b3331c631161a"} Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.034572 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500485-q5vqj" event={"ID":"4fa1c8c4-4ea6-484c-906a-6e7c8016757b","Type":"ContainerStarted","Data":"3f6839a6ef6de375728cbc5f1ed622c63055b46d91b766d2029f72a079a9aadc"} Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.038883 4838 patch_prober.go:28] interesting pod/downloads-7954f5f757-mzp9s container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.038914 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-mzp9s" podUID="ed8b29bf-e322-4cc9-b027-0aea680ce349" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.039539 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:52 crc kubenswrapper[4838]: E0202 10:55:52.040463 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:52.540448325 +0000 UTC m=+146.877549353 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.051001 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-mzp9s" podStartSLOduration=122.050986534 podStartE2EDuration="2m2.050986534s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:52.050214521 +0000 UTC m=+146.387315549" watchObservedRunningTime="2026-02-02 10:55:52.050986534 +0000 UTC m=+146.388087562" Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.070244 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29500485-q5vqj" podStartSLOduration=122.070225148 podStartE2EDuration="2m2.070225148s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:52.06994071 +0000 UTC m=+146.407041748" watchObservedRunningTime="2026-02-02 10:55:52.070225148 +0000 UTC m=+146.407326176" Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.096035 4838 generic.go:334] "Generic (PLEG): container finished" podID="b5aa31dc-369f-427e-97a4-b6245df5df4f" containerID="ebcef5d1674593f41ad820dc5dc342137b72f09915d22ef8ccc97937650c296f" exitCode=0 Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.096137 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" event={"ID":"b5aa31dc-369f-427e-97a4-b6245df5df4f","Type":"ContainerDied","Data":"ebcef5d1674593f41ad820dc5dc342137b72f09915d22ef8ccc97937650c296f"} Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.114346 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-dtdwq" event={"ID":"228a6edf-f91c-4f0f-8098-98831284e76c","Type":"ContainerStarted","Data":"8955cd106be055c2737ad566a27846f4c329c024b9535f04540c530590b5a890"} Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.114392 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-dtdwq" Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.119749 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-cqh2x" event={"ID":"f5e8635f-8114-4ba8-b74c-6dd667ef41df","Type":"ContainerStarted","Data":"d50d3d02eaad5fdd124d7b60a06bd1b427a3f0a837decc6b68104acbe173e295"} Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.144924 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:52 crc 
kubenswrapper[4838]: E0202 10:55:52.145830 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:52.645815903 +0000 UTC m=+146.982916931 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.147982 4838 patch_prober.go:28] interesting pod/console-operator-58897d9998-dtdwq container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.15:8443/readyz\": dial tcp 10.217.0.15:8443: connect: connection refused" start-of-body= Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.148008 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-dtdwq" podUID="228a6edf-f91c-4f0f-8098-98831284e76c" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.15:8443/readyz\": dial tcp 10.217.0.15:8443: connect: connection refused" Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.156796 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vkdls" event={"ID":"7c1d42f1-afc2-499f-9f51-aae97e6cc10a","Type":"ContainerStarted","Data":"f917eafcc9d3bc7394718c86c718980bdaee274daad3b47bb80b1bea034b52ea"} Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.157832 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vkdls" Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.183890 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-cqh2x" podStartSLOduration=122.183872579 podStartE2EDuration="2m2.183872579s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:52.178932124 +0000 UTC m=+146.516033172" watchObservedRunningTime="2026-02-02 10:55:52.183872579 +0000 UTC m=+146.520973607" Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.183906 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5fhpx" event={"ID":"65691c3c-b0b5-4460-a1d5-f64043ce7122","Type":"ContainerStarted","Data":"e82c7e979c2b5760d2bdf54b5aad0f08acc3a31e7c0bd05a763f73af8d8b817b"} Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.186793 4838 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-vkdls container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:8443/healthz\": dial tcp 10.217.0.38:8443: connect: connection refused" start-of-body= Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.186834 4838 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vkdls" podUID="7c1d42f1-afc2-499f-9f51-aae97e6cc10a" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.38:8443/healthz\": dial tcp 10.217.0.38:8443: connect: connection refused" Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.199148 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-dtdwq" podStartSLOduration=122.199130486 podStartE2EDuration="2m2.199130486s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:52.197011634 +0000 UTC m=+146.534112662" watchObservedRunningTime="2026-02-02 10:55:52.199130486 +0000 UTC m=+146.536231514" Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.212378 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-sz4zx" event={"ID":"7dc10e03-1a96-49b1-b3d4-2126af30e87b","Type":"ContainerStarted","Data":"de76e75b2de253f6cc690697cbbcf52fd06150b24c38dce18c331774b8dec9d2"} Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.212437 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-sz4zx" event={"ID":"7dc10e03-1a96-49b1-b3d4-2126af30e87b","Type":"ContainerStarted","Data":"07ff08954abe2f98c49180fc1dd6deef392ddc07f4c19e9eeda8d46a5c93c099"} Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.227763 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7bcl9" event={"ID":"49c08401-a807-46f6-9aca-26e535cabae5","Type":"ContainerStarted","Data":"3dabcb9480bddbb782d47d61efb06559e2c7ac0414afdc4d74457fddfdce6f64"} Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.243957 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vkdls" podStartSLOduration=122.243940519 podStartE2EDuration="2m2.243940519s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:52.220449861 +0000 UTC m=+146.557550889" watchObservedRunningTime="2026-02-02 10:55:52.243940519 +0000 UTC m=+146.581041557" Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.245632 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:52 crc kubenswrapper[4838]: E0202 10:55:52.246374 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:52.74636253 +0000 UTC m=+147.083463558 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.258691 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5fhpx" podStartSLOduration=122.258674061 podStartE2EDuration="2m2.258674061s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:52.258537627 +0000 UTC m=+146.595638665" watchObservedRunningTime="2026-02-02 10:55:52.258674061 +0000 UTC m=+146.595775089" Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.259096 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-2qsm5" event={"ID":"3e879c3f-8c95-449b-b9e7-439c78f48209","Type":"ContainerStarted","Data":"cb59937fbe88158c5201821494b1f0933c849f626ebd3341930491a07433790e"} Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.259689 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dmwvh" Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.280954 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.308323 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-2qsm5" podStartSLOduration=122.308310596 podStartE2EDuration="2m2.308310596s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:52.307581545 +0000 UTC m=+146.644682573" watchObservedRunningTime="2026-02-02 10:55:52.308310596 +0000 UTC m=+146.645411624" Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.319304 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.338136 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-j52s8" podStartSLOduration=122.33811948 podStartE2EDuration="2m2.33811948s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:52.337001437 +0000 UTC m=+146.674102465" watchObservedRunningTime="2026-02-02 10:55:52.33811948 +0000 UTC m=+146.675220508" Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.350480 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: 
\"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:52 crc kubenswrapper[4838]: E0202 10:55:52.353063 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:52.853051987 +0000 UTC m=+147.190153005 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.411899 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dmwvh" podStartSLOduration=122.411884362 podStartE2EDuration="2m2.411884362s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:52.411100039 +0000 UTC m=+146.748201067" watchObservedRunningTime="2026-02-02 10:55:52.411884362 +0000 UTC m=+146.748985390" Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.454258 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:52 crc kubenswrapper[4838]: E0202 10:55:52.454555 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:52.954540732 +0000 UTC m=+147.291641760 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.454578 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-cqh2x" Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.468808 4838 patch_prober.go:28] interesting pod/router-default-5444994796-cqh2x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 10:55:52 crc kubenswrapper[4838]: [-]has-synced failed: reason withheld Feb 02 10:55:52 crc kubenswrapper[4838]: [+]process-running ok Feb 02 10:55:52 crc kubenswrapper[4838]: healthz check failed Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.468857 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cqh2x" podUID="f5e8635f-8114-4ba8-b74c-6dd667ef41df" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.557179 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:52 crc kubenswrapper[4838]: E0202 10:55:52.557875 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:53.057803779 +0000 UTC m=+147.394904807 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.658977 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:52 crc kubenswrapper[4838]: E0202 10:55:52.659277 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:53.159263193 +0000 UTC m=+147.496364221 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.760049 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:52 crc kubenswrapper[4838]: E0202 10:55:52.760660 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:53.260635314 +0000 UTC m=+147.597736342 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.861160 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:52 crc kubenswrapper[4838]: E0202 10:55:52.861386 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:53.361346766 +0000 UTC m=+147.698447794 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.861754 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:52 crc kubenswrapper[4838]: E0202 10:55:52.862194 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:53.36218629 +0000 UTC m=+147.699287318 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.962940 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:52 crc kubenswrapper[4838]: E0202 10:55:52.963221 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:53.46318177 +0000 UTC m=+147.800282798 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:52 crc kubenswrapper[4838]: I0202 10:55:52.964121 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:52 crc kubenswrapper[4838]: E0202 10:55:52.964547 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:53.46453461 +0000 UTC m=+147.801635638 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.065662 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:53 crc kubenswrapper[4838]: E0202 10:55:53.065879 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:53.565834829 +0000 UTC m=+147.902935857 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.066127 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:53 crc kubenswrapper[4838]: E0202 10:55:53.066496 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:53.566488578 +0000 UTC m=+147.903589816 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.166874 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:53 crc kubenswrapper[4838]: E0202 10:55:53.167209 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:53.667169519 +0000 UTC m=+148.004270547 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.168409 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:53 crc kubenswrapper[4838]: E0202 10:55:53.168861 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:53.668852089 +0000 UTC m=+148.005953117 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.265363 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-s84zq" event={"ID":"cdd0449c-7006-492f-90ff-5c7962dbe6f9","Type":"ContainerStarted","Data":"8289180a30f89ef2d14b238ebad8d4e92cc4af6ef9a88489f311bc70a7cb52bd"} Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.267756 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fcmcn" event={"ID":"99d83385-4586-40c5-af02-d293febdffc2","Type":"ContainerStarted","Data":"d269df26ce599ec263dbcc5e8d17a9268b93cb36abd02c21adbd2372a81f3efb"} Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.267787 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fcmcn" event={"ID":"99d83385-4586-40c5-af02-d293febdffc2","Type":"ContainerStarted","Data":"0e5eb7cad1e5bf04d25be44fcfe9d41649a17618647265fbaadd97eb9db2a5f2"} Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.268981 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:53 crc kubenswrapper[4838]: E0202 10:55:53.269221 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:53.76918972 +0000 UTC m=+148.106290748 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.269362 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5fhpx" event={"ID":"65691c3c-b0b5-4460-a1d5-f64043ce7122","Type":"ContainerStarted","Data":"bb4de65364545fc027ae2088cfe39708434aa0c9a7f0c422aaa02ec724038d18"} Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.269422 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:53 crc kubenswrapper[4838]: E0202 10:55:53.269843 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:53.769828698 +0000 UTC m=+148.106929726 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.271346 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-sz4zx" event={"ID":"7dc10e03-1a96-49b1-b3d4-2126af30e87b","Type":"ContainerStarted","Data":"b459907c617bee01b4bbb998cc4bb03fdaa251a6d75eb0ae171e7d9f7cb6e66a"} Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.271905 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-sz4zx" Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.273512 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7bcl9" event={"ID":"49c08401-a807-46f6-9aca-26e535cabae5","Type":"ContainerStarted","Data":"f86318b8791eaf874b0e1d6e7017de30abde7da2438fad75a284662d9f2fb013"} Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.273540 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7bcl9" event={"ID":"49c08401-a807-46f6-9aca-26e535cabae5","Type":"ContainerStarted","Data":"1a39997b4a9cdb29f756c921fd6a602d0be2f5d3e4569c190a4c5300bea5047e"} Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.273916 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7bcl9" Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.275238 4838 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-p64qh" event={"ID":"9cc33e8c-fb5d-4006-be95-170ec3d739e9","Type":"ContainerStarted","Data":"999eddb7643a1943b23424fa3c0edb0ec968ec459568fa5daadc8100f171dab6"} Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.276600 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gxndj" event={"ID":"b6385779-f490-4523-8a93-5c043fb29f56","Type":"ContainerStarted","Data":"0c09cc400c915681718b67241d474b1b7650545871f06b489be480d640fe731c"} Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.279201 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" event={"ID":"b5aa31dc-369f-427e-97a4-b6245df5df4f","Type":"ContainerStarted","Data":"894682b1e3e29d9098d57bdc8ffa8eaa25b886f1a32b67b8f276e512d0d70298"} Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.279227 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" event={"ID":"b5aa31dc-369f-427e-97a4-b6245df5df4f","Type":"ContainerStarted","Data":"03bf49337780c2746b4f7513b657983842105eaffcca444888f07bc96f2a6f70"} Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.280668 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-nfzt8" event={"ID":"97092ddc-d56d-49b0-b290-30f113972b43","Type":"ContainerStarted","Data":"c6d913c4af1772ac5d14fabefa159bdb9c658b868dab54d8ad297b605f127e04"} Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.282024 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-c2fjv" event={"ID":"5d5134e9-c9b5-4959-8092-581e357e5ebf","Type":"ContainerStarted","Data":"2f451530898b0ae9674858e58f33950f8871835f1ca94e1505e332bb5b5b0939"} Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.285943 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-s96vn" event={"ID":"3bffe14a-0216-4854-b0fc-7c482a297b82","Type":"ContainerStarted","Data":"4bcd58e413a776a9641083c37d859395b5d0a24ade0d69ad3cc133dd07cd6896"} Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.286708 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-s96vn" Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.288302 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-zzq5h" event={"ID":"96f87e1d-9edf-44a3-b884-789dd6a3f334","Type":"ContainerStarted","Data":"a270ef179c670b7ccb17d5fcb34ac98d1c87a1fd9abe511e2f6d8f864ccf9dfe"} Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.288770 4838 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-s96vn container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused" start-of-body= Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.288828 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-s96vn" podUID="3bffe14a-0216-4854-b0fc-7c482a297b82" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused" Feb 02 10:55:53 crc 
kubenswrapper[4838]: I0202 10:55:53.290839 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-s7xdh" event={"ID":"ec1b1633-82ef-481f-baac-8bb589265b21","Type":"ContainerStarted","Data":"8fe44fe3225c1a348a94fd03e045741c7cdab506a72b423664b322a1e5c9b2ad"} Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.293140 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9ssxs" event={"ID":"f245ed23-8fbf-49f9-a284-6580d1025ceb","Type":"ContainerStarted","Data":"f1e5bc728540268e2568eaac4f9fd67d6d3e3b615e39f0d00717c7d391a912fd"} Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.296100 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" event={"ID":"4186f5d8-e330-4fae-943e-a6abbdb49b96","Type":"ContainerStarted","Data":"e7715f2e6accfddf6e109a2762cb1082b45157dce60a8aa70323e8ed80db4d51"} Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.298170 4838 patch_prober.go:28] interesting pod/downloads-7954f5f757-mzp9s container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.298230 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-mzp9s" podUID="ed8b29bf-e322-4cc9-b027-0aea680ce349" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.305268 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdxgd" Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.356041 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vkdls" Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.366039 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.366099 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.367040 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-s84zq" podStartSLOduration=123.367016097 podStartE2EDuration="2m3.367016097s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:53.334926296 +0000 UTC m=+147.672027344" watchObservedRunningTime="2026-02-02 10:55:53.367016097 +0000 UTC m=+147.704117125" Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.370955 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:53 crc 
kubenswrapper[4838]: E0202 10:55:53.373979 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:53.87395138 +0000 UTC m=+148.211052408 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.458790 4838 patch_prober.go:28] interesting pod/router-default-5444994796-cqh2x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 10:55:53 crc kubenswrapper[4838]: [-]has-synced failed: reason withheld Feb 02 10:55:53 crc kubenswrapper[4838]: [+]process-running ok Feb 02 10:55:53 crc kubenswrapper[4838]: healthz check failed Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.459146 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cqh2x" podUID="f5e8635f-8114-4ba8-b74c-6dd667ef41df" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.475911 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:53 crc kubenswrapper[4838]: E0202 10:55:53.476379 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:53.976363432 +0000 UTC m=+148.313464460 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.577372 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:53 crc kubenswrapper[4838]: E0202 10:55:53.577624 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:54.077586129 +0000 UTC m=+148.414687147 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.577988 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:53 crc kubenswrapper[4838]: E0202 10:55:53.578649 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:54.078629809 +0000 UTC m=+148.415730827 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.579373 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-sz4zx" podStartSLOduration=7.57935016 podStartE2EDuration="7.57935016s" podCreationTimestamp="2026-02-02 10:55:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:53.576469806 +0000 UTC m=+147.913570844" watchObservedRunningTime="2026-02-02 10:55:53.57935016 +0000 UTC m=+147.916451188" Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.580807 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" podStartSLOduration=123.580798453 podStartE2EDuration="2m3.580798453s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:53.514490919 +0000 UTC m=+147.851591947" watchObservedRunningTime="2026-02-02 10:55:53.580798453 +0000 UTC m=+147.917899481" Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.643661 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9ssxs" podStartSLOduration=123.643610934 podStartE2EDuration="2m3.643610934s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:53.639315008 +0000 UTC m=+147.976416036" watchObservedRunningTime="2026-02-02 10:55:53.643610934 +0000 UTC m=+147.980711962" Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.679866 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:53 crc kubenswrapper[4838]: E0202 10:55:53.680554 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:54.180528066 +0000 UTC m=+148.517629094 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.701217 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-dtdwq" Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.739855 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-s96vn" podStartSLOduration=123.739820084 podStartE2EDuration="2m3.739820084s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:53.693408333 +0000 UTC m=+148.030509371" watchObservedRunningTime="2026-02-02 10:55:53.739820084 +0000 UTC m=+148.076921112" Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.782929 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zdtxg" Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.786063 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:53 crc kubenswrapper[4838]: E0202 10:55:53.786408 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:54.286395489 +0000 UTC m=+148.623496517 (durationBeforeRetry 500ms). 
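
The prober traffic around these entries shows the probe lifecycle end to end: a Readiness or Startup probe issues an HTTP GET, patch_prober logs a refused connection or a 5xx together with the start of the response body, prober.go marks the probe failed, and SyncLoop (probe) later flips the status once a check passes. A reduced version of such an HTTP check, using an illustrative localhost endpoint rather than one of the pod IPs above:

package main

import (
	"fmt"
	"io"
	"net/http"
	"time"
)

func httpProbe(url string, timeout time.Duration) (ok bool, output string) {
	client := &http.Client{Timeout: timeout}
	resp, err := client.Get(url)
	if err != nil {
		// e.g. "connect: connection refused" while the server is not up yet
		return false, fmt.Sprintf("Get %q: %v", url, err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(io.LimitReader(resp.Body, 1024)) // keep only the start of the body, as the log does
	if resp.StatusCode >= 200 && resp.StatusCode < 400 {
		return true, string(body)
	}
	return false, fmt.Sprintf("HTTP probe failed with statuscode: %d\n%s", resp.StatusCode, body)
}

func main() {
	ok, out := httpProbe("http://127.0.0.1:8080/healthz", time.Second)
	fmt.Println(ok, out)
}

The router's 500 responses above carry exactly this kind of start-of-body detail: the [-]backend-http and [-]has-synced lines are the healthz endpoint itemizing which sub-checks failed.
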
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.794132 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7bcl9" podStartSLOduration=123.794117115 podStartE2EDuration="2m3.794117115s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:53.751047183 +0000 UTC m=+148.088148211" watchObservedRunningTime="2026-02-02 10:55:53.794117115 +0000 UTC m=+148.131218143" Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.794508 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-p64qh" podStartSLOduration=123.794504457 podStartE2EDuration="2m3.794504457s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:53.794065464 +0000 UTC m=+148.131166502" watchObservedRunningTime="2026-02-02 10:55:53.794504457 +0000 UTC m=+148.131605485" Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.870104 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-c2fjv" podStartSLOduration=123.870089322 podStartE2EDuration="2m3.870089322s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:53.868801424 +0000 UTC m=+148.205902452" watchObservedRunningTime="2026-02-02 10:55:53.870089322 +0000 UTC m=+148.207190350" Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.894661 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:53 crc kubenswrapper[4838]: E0202 10:55:53.895046 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:54.395031133 +0000 UTC m=+148.732132161 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:53 crc kubenswrapper[4838]: I0202 10:55:53.995903 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:53 crc kubenswrapper[4838]: E0202 10:55:53.996235 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:54.496222639 +0000 UTC m=+148.833323667 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.008597 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.008781 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.010659 4838 patch_prober.go:28] interesting pod/apiserver-76f77b778f-zfr2j container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.17:8443/livez\": dial tcp 10.217.0.17:8443: connect: connection refused" start-of-body= Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.010699 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" podUID="b5aa31dc-369f-427e-97a4-b6245df5df4f" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.17:8443/livez\": dial tcp 10.217.0.17:8443: connect: connection refused" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.041900 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" podStartSLOduration=124.041873017 podStartE2EDuration="2m4.041873017s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:53.908745355 +0000 UTC m=+148.245846373" watchObservedRunningTime="2026-02-02 10:55:54.041873017 +0000 UTC m=+148.378974035" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.042483 4838 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/community-operators-ntc2n"] Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.043520 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ntc2n" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.044022 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gxndj" podStartSLOduration=124.043996679 podStartE2EDuration="2m4.043996679s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:54.03855437 +0000 UTC m=+148.375655398" watchObservedRunningTime="2026-02-02 10:55:54.043996679 +0000 UTC m=+148.381097707" Feb 02 10:55:54 crc kubenswrapper[4838]: W0202 10:55:54.062473 4838 reflector.go:561] object-"openshift-marketplace"/"community-operators-dockercfg-dmngl": failed to list *v1.Secret: secrets "community-operators-dockercfg-dmngl" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-marketplace": no relationship found between node 'crc' and this object Feb 02 10:55:54 crc kubenswrapper[4838]: E0202 10:55:54.062516 4838 reflector.go:158] "Unhandled Error" err="object-\"openshift-marketplace\"/\"community-operators-dockercfg-dmngl\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"community-operators-dockercfg-dmngl\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-marketplace\": no relationship found between node 'crc' and this object" logger="UnhandledError" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.096777 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:54 crc kubenswrapper[4838]: E0202 10:55:54.097037 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:54.596993683 +0000 UTC m=+148.934094711 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.097247 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7b6d22c-5441-4f5c-830b-17d67446352d-utilities\") pod \"community-operators-ntc2n\" (UID: \"d7b6d22c-5441-4f5c-830b-17d67446352d\") " pod="openshift-marketplace/community-operators-ntc2n" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.097352 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7b6d22c-5441-4f5c-830b-17d67446352d-catalog-content\") pod \"community-operators-ntc2n\" (UID: \"d7b6d22c-5441-4f5c-830b-17d67446352d\") " pod="openshift-marketplace/community-operators-ntc2n" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.097473 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.097567 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6sqx\" (UniqueName: \"kubernetes.io/projected/d7b6d22c-5441-4f5c-830b-17d67446352d-kube-api-access-l6sqx\") pod \"community-operators-ntc2n\" (UID: \"d7b6d22c-5441-4f5c-830b-17d67446352d\") " pod="openshift-marketplace/community-operators-ntc2n" Feb 02 10:55:54 crc kubenswrapper[4838]: E0202 10:55:54.097852 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:54.597831137 +0000 UTC m=+148.934932165 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.113663 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fcmcn" podStartSLOduration=124.113646191 podStartE2EDuration="2m4.113646191s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:54.112333742 +0000 UTC m=+148.449434770" watchObservedRunningTime="2026-02-02 10:55:54.113646191 +0000 UTC m=+148.450747219" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.117466 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ntc2n"] Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.198625 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.198850 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6sqx\" (UniqueName: \"kubernetes.io/projected/d7b6d22c-5441-4f5c-830b-17d67446352d-kube-api-access-l6sqx\") pod \"community-operators-ntc2n\" (UID: \"d7b6d22c-5441-4f5c-830b-17d67446352d\") " pod="openshift-marketplace/community-operators-ntc2n" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.198925 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7b6d22c-5441-4f5c-830b-17d67446352d-utilities\") pod \"community-operators-ntc2n\" (UID: \"d7b6d22c-5441-4f5c-830b-17d67446352d\") " pod="openshift-marketplace/community-operators-ntc2n" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.198946 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7b6d22c-5441-4f5c-830b-17d67446352d-catalog-content\") pod \"community-operators-ntc2n\" (UID: \"d7b6d22c-5441-4f5c-830b-17d67446352d\") " pod="openshift-marketplace/community-operators-ntc2n" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.199370 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7b6d22c-5441-4f5c-830b-17d67446352d-catalog-content\") pod \"community-operators-ntc2n\" (UID: \"d7b6d22c-5441-4f5c-830b-17d67446352d\") " pod="openshift-marketplace/community-operators-ntc2n" Feb 02 10:55:54 crc kubenswrapper[4838]: E0202 10:55:54.199661 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-02-02 10:55:54.699647531 +0000 UTC m=+149.036748549 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.199860 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7b6d22c-5441-4f5c-830b-17d67446352d-utilities\") pod \"community-operators-ntc2n\" (UID: \"d7b6d22c-5441-4f5c-830b-17d67446352d\") " pod="openshift-marketplace/community-operators-ntc2n" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.263600 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-m4f9j"] Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.264744 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m4f9j" Feb 02 10:55:54 crc kubenswrapper[4838]: W0202 10:55:54.269434 4838 reflector.go:561] object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g": failed to list *v1.Secret: secrets "certified-operators-dockercfg-4rs5g" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-marketplace": no relationship found between node 'crc' and this object Feb 02 10:55:54 crc kubenswrapper[4838]: E0202 10:55:54.269647 4838 reflector.go:158] "Unhandled Error" err="object-\"openshift-marketplace\"/\"certified-operators-dockercfg-4rs5g\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"certified-operators-dockercfg-4rs5g\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-marketplace\": no relationship found between node 'crc' and this object" logger="UnhandledError" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.296041 4838 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-dmwvh container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.16:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.296091 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dmwvh" podUID="03d2fa95-b476-4259-8d2d-69bd31c28da4" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.16:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.299208 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m4f9j"] Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.299953 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:54 crc kubenswrapper[4838]: E0202 10:55:54.300296 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:54.800286411 +0000 UTC m=+149.137387439 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.327785 4838 generic.go:334] "Generic (PLEG): container finished" podID="4fa1c8c4-4ea6-484c-906a-6e7c8016757b" containerID="0c7187a3636eb3c3e489663f9051cf28d2d7e42f92ee96011b4b3331c631161a" exitCode=0 Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.327860 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500485-q5vqj" event={"ID":"4fa1c8c4-4ea6-484c-906a-6e7c8016757b","Type":"ContainerDied","Data":"0c7187a3636eb3c3e489663f9051cf28d2d7e42f92ee96011b4b3331c631161a"} Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.354080 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-zzq5h" event={"ID":"96f87e1d-9edf-44a3-b884-789dd6a3f334","Type":"ContainerStarted","Data":"79a4211adf9271dc3e4849e1dc627e299eef46f4f316ce76fe5ae45567f754e5"} Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.361696 4838 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-s96vn container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused" start-of-body= Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.361748 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-s96vn" podUID="3bffe14a-0216-4854-b0fc-7c482a297b82" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.388244 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dmwvh" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.390410 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6sqx\" (UniqueName: \"kubernetes.io/projected/d7b6d22c-5441-4f5c-830b-17d67446352d-kube-api-access-l6sqx\") pod \"community-operators-ntc2n\" (UID: \"d7b6d22c-5441-4f5c-830b-17d67446352d\") " pod="openshift-marketplace/community-operators-ntc2n" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.408234 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:54 crc kubenswrapper[4838]: E0202 10:55:54.408398 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:54.908376819 +0000 UTC m=+149.245477837 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.408661 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cdb7ade3-e1b6-436b-a5df-3abb972b72fa-utilities\") pod \"certified-operators-m4f9j\" (UID: \"cdb7ade3-e1b6-436b-a5df-3abb972b72fa\") " pod="openshift-marketplace/certified-operators-m4f9j" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.408902 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cdb7ade3-e1b6-436b-a5df-3abb972b72fa-catalog-content\") pod \"certified-operators-m4f9j\" (UID: \"cdb7ade3-e1b6-436b-a5df-3abb972b72fa\") " pod="openshift-marketplace/certified-operators-m4f9j" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.408972 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.409156 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4fv8\" (UniqueName: \"kubernetes.io/projected/cdb7ade3-e1b6-436b-a5df-3abb972b72fa-kube-api-access-h4fv8\") pod \"certified-operators-m4f9j\" (UID: \"cdb7ade3-e1b6-436b-a5df-3abb972b72fa\") " pod="openshift-marketplace/certified-operators-m4f9j" Feb 02 10:55:54 crc kubenswrapper[4838]: E0202 10:55:54.411442 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:54.911432649 +0000 UTC m=+149.248533667 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.445000 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-g2rlc"] Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.446216 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.446294 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g2rlc" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.470830 4838 patch_prober.go:28] interesting pod/router-default-5444994796-cqh2x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 10:55:54 crc kubenswrapper[4838]: [-]has-synced failed: reason withheld Feb 02 10:55:54 crc kubenswrapper[4838]: [+]process-running ok Feb 02 10:55:54 crc kubenswrapper[4838]: healthz check failed Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.470890 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cqh2x" podUID="f5e8635f-8114-4ba8-b74c-6dd667ef41df" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.492248 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-g2rlc"] Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.511375 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.511530 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da4cb631-2c1a-4acd-8e10-cf1fbdb099e1-utilities\") pod \"community-operators-g2rlc\" (UID: \"da4cb631-2c1a-4acd-8e10-cf1fbdb099e1\") " pod="openshift-marketplace/community-operators-g2rlc" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.511589 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4fv8\" (UniqueName: \"kubernetes.io/projected/cdb7ade3-e1b6-436b-a5df-3abb972b72fa-kube-api-access-h4fv8\") pod \"certified-operators-m4f9j\" (UID: \"cdb7ade3-e1b6-436b-a5df-3abb972b72fa\") " pod="openshift-marketplace/certified-operators-m4f9j" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.511694 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da4cb631-2c1a-4acd-8e10-cf1fbdb099e1-catalog-content\") pod \"community-operators-g2rlc\" (UID: 
\"da4cb631-2c1a-4acd-8e10-cf1fbdb099e1\") " pod="openshift-marketplace/community-operators-g2rlc" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.511744 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cdb7ade3-e1b6-436b-a5df-3abb972b72fa-utilities\") pod \"certified-operators-m4f9j\" (UID: \"cdb7ade3-e1b6-436b-a5df-3abb972b72fa\") " pod="openshift-marketplace/certified-operators-m4f9j" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.511797 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n2xt\" (UniqueName: \"kubernetes.io/projected/da4cb631-2c1a-4acd-8e10-cf1fbdb099e1-kube-api-access-8n2xt\") pod \"community-operators-g2rlc\" (UID: \"da4cb631-2c1a-4acd-8e10-cf1fbdb099e1\") " pod="openshift-marketplace/community-operators-g2rlc" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.511822 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cdb7ade3-e1b6-436b-a5df-3abb972b72fa-catalog-content\") pod \"certified-operators-m4f9j\" (UID: \"cdb7ade3-e1b6-436b-a5df-3abb972b72fa\") " pod="openshift-marketplace/certified-operators-m4f9j" Feb 02 10:55:54 crc kubenswrapper[4838]: E0202 10:55:54.512021 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:55.011992066 +0000 UTC m=+149.349093094 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.512857 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cdb7ade3-e1b6-436b-a5df-3abb972b72fa-utilities\") pod \"certified-operators-m4f9j\" (UID: \"cdb7ade3-e1b6-436b-a5df-3abb972b72fa\") " pod="openshift-marketplace/certified-operators-m4f9j" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.520424 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cdb7ade3-e1b6-436b-a5df-3abb972b72fa-catalog-content\") pod \"certified-operators-m4f9j\" (UID: \"cdb7ade3-e1b6-436b-a5df-3abb972b72fa\") " pod="openshift-marketplace/certified-operators-m4f9j" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.584041 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4fv8\" (UniqueName: \"kubernetes.io/projected/cdb7ade3-e1b6-436b-a5df-3abb972b72fa-kube-api-access-h4fv8\") pod \"certified-operators-m4f9j\" (UID: \"cdb7ade3-e1b6-436b-a5df-3abb972b72fa\") " pod="openshift-marketplace/certified-operators-m4f9j" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.614016 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n2xt\" (UniqueName: 
\"kubernetes.io/projected/da4cb631-2c1a-4acd-8e10-cf1fbdb099e1-kube-api-access-8n2xt\") pod \"community-operators-g2rlc\" (UID: \"da4cb631-2c1a-4acd-8e10-cf1fbdb099e1\") " pod="openshift-marketplace/community-operators-g2rlc" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.614069 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.614090 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da4cb631-2c1a-4acd-8e10-cf1fbdb099e1-utilities\") pod \"community-operators-g2rlc\" (UID: \"da4cb631-2c1a-4acd-8e10-cf1fbdb099e1\") " pod="openshift-marketplace/community-operators-g2rlc" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.614157 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da4cb631-2c1a-4acd-8e10-cf1fbdb099e1-catalog-content\") pod \"community-operators-g2rlc\" (UID: \"da4cb631-2c1a-4acd-8e10-cf1fbdb099e1\") " pod="openshift-marketplace/community-operators-g2rlc" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.614497 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da4cb631-2c1a-4acd-8e10-cf1fbdb099e1-catalog-content\") pod \"community-operators-g2rlc\" (UID: \"da4cb631-2c1a-4acd-8e10-cf1fbdb099e1\") " pod="openshift-marketplace/community-operators-g2rlc" Feb 02 10:55:54 crc kubenswrapper[4838]: E0202 10:55:54.615018 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:55.115006025 +0000 UTC m=+149.452107043 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.615341 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da4cb631-2c1a-4acd-8e10-cf1fbdb099e1-utilities\") pod \"community-operators-g2rlc\" (UID: \"da4cb631-2c1a-4acd-8e10-cf1fbdb099e1\") " pod="openshift-marketplace/community-operators-g2rlc" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.615945 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rbh6x"] Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.630441 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rbh6x" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.637466 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rbh6x"] Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.707845 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8n2xt\" (UniqueName: \"kubernetes.io/projected/da4cb631-2c1a-4acd-8e10-cf1fbdb099e1-kube-api-access-8n2xt\") pod \"community-operators-g2rlc\" (UID: \"da4cb631-2c1a-4acd-8e10-cf1fbdb099e1\") " pod="openshift-marketplace/community-operators-g2rlc" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.717817 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.717992 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21ad3b27-d69d-4db6-a1c6-fac312ad582d-utilities\") pod \"certified-operators-rbh6x\" (UID: \"21ad3b27-d69d-4db6-a1c6-fac312ad582d\") " pod="openshift-marketplace/certified-operators-rbh6x" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.718017 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8wwd\" (UniqueName: \"kubernetes.io/projected/21ad3b27-d69d-4db6-a1c6-fac312ad582d-kube-api-access-l8wwd\") pod \"certified-operators-rbh6x\" (UID: \"21ad3b27-d69d-4db6-a1c6-fac312ad582d\") " pod="openshift-marketplace/certified-operators-rbh6x" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.718085 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21ad3b27-d69d-4db6-a1c6-fac312ad582d-catalog-content\") pod \"certified-operators-rbh6x\" (UID: \"21ad3b27-d69d-4db6-a1c6-fac312ad582d\") " pod="openshift-marketplace/certified-operators-rbh6x" Feb 02 10:55:54 crc kubenswrapper[4838]: E0202 10:55:54.718222 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:55.218205489 +0000 UTC m=+149.555306517 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.827447 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21ad3b27-d69d-4db6-a1c6-fac312ad582d-catalog-content\") pod \"certified-operators-rbh6x\" (UID: \"21ad3b27-d69d-4db6-a1c6-fac312ad582d\") " pod="openshift-marketplace/certified-operators-rbh6x" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.828064 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.828187 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21ad3b27-d69d-4db6-a1c6-fac312ad582d-utilities\") pod \"certified-operators-rbh6x\" (UID: \"21ad3b27-d69d-4db6-a1c6-fac312ad582d\") " pod="openshift-marketplace/certified-operators-rbh6x" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.828263 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8wwd\" (UniqueName: \"kubernetes.io/projected/21ad3b27-d69d-4db6-a1c6-fac312ad582d-kube-api-access-l8wwd\") pod \"certified-operators-rbh6x\" (UID: \"21ad3b27-d69d-4db6-a1c6-fac312ad582d\") " pod="openshift-marketplace/certified-operators-rbh6x" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.828811 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21ad3b27-d69d-4db6-a1c6-fac312ad582d-catalog-content\") pod \"certified-operators-rbh6x\" (UID: \"21ad3b27-d69d-4db6-a1c6-fac312ad582d\") " pod="openshift-marketplace/certified-operators-rbh6x" Feb 02 10:55:54 crc kubenswrapper[4838]: E0202 10:55:54.828982 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:55.328967856 +0000 UTC m=+149.666068884 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.829110 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21ad3b27-d69d-4db6-a1c6-fac312ad582d-utilities\") pod \"certified-operators-rbh6x\" (UID: \"21ad3b27-d69d-4db6-a1c6-fac312ad582d\") " pod="openshift-marketplace/certified-operators-rbh6x" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.857448 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8wwd\" (UniqueName: \"kubernetes.io/projected/21ad3b27-d69d-4db6-a1c6-fac312ad582d-kube-api-access-l8wwd\") pod \"certified-operators-rbh6x\" (UID: \"21ad3b27-d69d-4db6-a1c6-fac312ad582d\") " pod="openshift-marketplace/certified-operators-rbh6x" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.930802 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:54 crc kubenswrapper[4838]: E0202 10:55:54.931084 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:55.431066208 +0000 UTC m=+149.768167236 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.953008 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.959309 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ntc2n" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.960141 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-g2rlc" Feb 02 10:55:54 crc kubenswrapper[4838]: I0202 10:55:54.978238 4838 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.032325 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:55 crc kubenswrapper[4838]: E0202 10:55:55.032668 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:55.532654756 +0000 UTC m=+149.869755774 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.134234 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:55 crc kubenswrapper[4838]: E0202 10:55:55.134454 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:55.634414068 +0000 UTC m=+149.971515096 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.134739 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:55 crc kubenswrapper[4838]: E0202 10:55:55.135133 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-02-02 10:55:55.635126379 +0000 UTC m=+149.972227407 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.236327 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:55 crc kubenswrapper[4838]: E0202 10:55:55.236502 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:55.73647541 +0000 UTC m=+150.073576438 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.236983 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:55 crc kubenswrapper[4838]: E0202 10:55:55.237495 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:55.737471009 +0000 UTC m=+150.074572037 (durationBeforeRetry 500ms). 
Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.339023 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 10:55:55 crc kubenswrapper[4838]: E0202 10:55:55.339431 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:55.839418217 +0000 UTC m=+150.176519245 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.363726 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-zzq5h" event={"ID":"96f87e1d-9edf-44a3-b884-789dd6a3f334","Type":"ContainerStarted","Data":"abda0b1f11ee98314043b108c2a8e1e38de4f74ac122602a285726fde982b605"}
Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.363797 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-zzq5h" event={"ID":"96f87e1d-9edf-44a3-b884-789dd6a3f334","Type":"ContainerStarted","Data":"eab53a01fa2047128b0907bc1ae053229ae82bac6f7441e56b68ea077e4d0913"}
Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.369036 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-s96vn"
Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.379177 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4tf2g"
Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.404276 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-zzq5h" podStartSLOduration=9.404259908 podStartE2EDuration="9.404259908s" podCreationTimestamp="2026-02-02 10:55:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:55.396566782 +0000 UTC m=+149.733667810" watchObservedRunningTime="2026-02-02 10:55:55.404259908 +0000 UTC m=+149.741360936"
Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.408444 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-g2rlc"]
Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.440168 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.440704 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc"
Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.440921 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:55:55 crc kubenswrapper[4838]: E0202 10:55:55.443700 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:55.943685473 +0000 UTC m=+150.280786501 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.445262 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.454845 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.457264 4838 patch_prober.go:28] interesting pod/router-default-5444994796-cqh2x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Feb 02 10:55:55 crc kubenswrapper[4838]: [-]has-synced failed: reason withheld
Feb 02 10:55:55 crc kubenswrapper[4838]: [+]process-running ok
Feb 02 10:55:55 crc kubenswrapper[4838]: healthz check failed
Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.457329 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cqh2x" podUID="f5e8635f-8114-4ba8-b74c-6dd667ef41df" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cqh2x" podUID="f5e8635f-8114-4ba8-b74c-6dd667ef41df" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.464426 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.465475 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m4f9j" Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.471338 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rbh6x" Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.487242 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ntc2n"] Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.524234 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.541726 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.542022 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.542066 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:55:55 crc kubenswrapper[4838]: E0202 10:55:55.542223 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:56.0421765 +0000 UTC m=+150.379277528 (durationBeforeRetry 500ms). 
Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.547176 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.547643 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.644535 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc"
Feb 02 10:55:55 crc kubenswrapper[4838]: E0202 10:55:55.645264 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:56.145253241 +0000 UTC m=+150.482354269 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.733692 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.745918 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 10:55:55 crc kubenswrapper[4838]: E0202 10:55:55.746053 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 10:55:56.246023465 +0000 UTC m=+150.583124503 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.746136 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc"
Feb 02 10:55:55 crc kubenswrapper[4838]: E0202 10:55:55.746437 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 10:55:56.246426027 +0000 UTC m=+150.583527055 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8b9cc" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.748863 4838 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-02-02T10:55:54.978499439Z","Handler":null,"Name":""}
Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.754648 4838 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.754683 4838 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.758018 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500485-q5vqj"
Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.838519 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.851805 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4fa1c8c4-4ea6-484c-906a-6e7c8016757b-secret-volume\") pod \"4fa1c8c4-4ea6-484c-906a-6e7c8016757b\" (UID: \"4fa1c8c4-4ea6-484c-906a-6e7c8016757b\") " Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.852113 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.852157 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2l5n9\" (UniqueName: \"kubernetes.io/projected/4fa1c8c4-4ea6-484c-906a-6e7c8016757b-kube-api-access-2l5n9\") pod \"4fa1c8c4-4ea6-484c-906a-6e7c8016757b\" (UID: \"4fa1c8c4-4ea6-484c-906a-6e7c8016757b\") " Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.852178 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4fa1c8c4-4ea6-484c-906a-6e7c8016757b-config-volume\") pod \"4fa1c8c4-4ea6-484c-906a-6e7c8016757b\" (UID: \"4fa1c8c4-4ea6-484c-906a-6e7c8016757b\") " Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.854310 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fa1c8c4-4ea6-484c-906a-6e7c8016757b-config-volume" (OuterVolumeSpecName: "config-volume") pod "4fa1c8c4-4ea6-484c-906a-6e7c8016757b" (UID: "4fa1c8c4-4ea6-484c-906a-6e7c8016757b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.857884 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fa1c8c4-4ea6-484c-906a-6e7c8016757b-kube-api-access-2l5n9" (OuterVolumeSpecName: "kube-api-access-2l5n9") pod "4fa1c8c4-4ea6-484c-906a-6e7c8016757b" (UID: "4fa1c8c4-4ea6-484c-906a-6e7c8016757b"). InnerVolumeSpecName "kube-api-access-2l5n9". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.860068 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fa1c8c4-4ea6-484c-906a-6e7c8016757b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4fa1c8c4-4ea6-484c-906a-6e7c8016757b" (UID: "4fa1c8c4-4ea6-484c-906a-6e7c8016757b"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.867051 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.958596 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tnscg"] Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.963921 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.964133 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2l5n9\" (UniqueName: \"kubernetes.io/projected/4fa1c8c4-4ea6-484c-906a-6e7c8016757b-kube-api-access-2l5n9\") on node \"crc\" DevicePath \"\"" Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.964144 4838 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4fa1c8c4-4ea6-484c-906a-6e7c8016757b-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.964155 4838 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4fa1c8c4-4ea6-484c-906a-6e7c8016757b-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 02 10:55:55 crc kubenswrapper[4838]: E0202 10:55:55.965766 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fa1c8c4-4ea6-484c-906a-6e7c8016757b" containerName="collect-profiles" Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.965797 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fa1c8c4-4ea6-484c-906a-6e7c8016757b" containerName="collect-profiles" Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.965956 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fa1c8c4-4ea6-484c-906a-6e7c8016757b" containerName="collect-profiles" Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.966698 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tnscg" Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.977231 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tnscg"] Feb 02 10:55:55 crc kubenswrapper[4838]: I0202 10:55:55.989681 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Feb 02 10:55:56 crc kubenswrapper[4838]: W0202 10:55:56.002195 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-3aa6843f14bd8bcc53efa308f8b6a4119d76fd609578ded663069c9e8cf77564 WatchSource:0}: Error finding container 3aa6843f14bd8bcc53efa308f8b6a4119d76fd609578ded663069c9e8cf77564: Status 404 returned error can't find the container with id 3aa6843f14bd8bcc53efa308f8b6a4119d76fd609578ded663069c9e8cf77564 Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.006058 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m4f9j"] Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.022591 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rbh6x"] Feb 02 10:55:56 crc kubenswrapper[4838]: W0202 10:55:56.036730 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod21ad3b27_d69d_4db6_a1c6_fac312ad582d.slice/crio-119aa7b425e1a760f6b86ab52cb7c3033d1b4c6642e7373cf0a0243a4b530651 WatchSource:0}: Error finding container 119aa7b425e1a760f6b86ab52cb7c3033d1b4c6642e7373cf0a0243a4b530651: Status 404 returned error can't find the container with id 119aa7b425e1a760f6b86ab52cb7c3033d1b4c6642e7373cf0a0243a4b530651 Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.065668 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf0d00a9-64a4-46b9-9bc7-8617b0f3692a-utilities\") pod \"redhat-marketplace-tnscg\" (UID: \"bf0d00a9-64a4-46b9-9bc7-8617b0f3692a\") " pod="openshift-marketplace/redhat-marketplace-tnscg" Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.065710 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf0d00a9-64a4-46b9-9bc7-8617b0f3692a-catalog-content\") pod \"redhat-marketplace-tnscg\" (UID: \"bf0d00a9-64a4-46b9-9bc7-8617b0f3692a\") " pod="openshift-marketplace/redhat-marketplace-tnscg" Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.065815 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gk2l\" (UniqueName: \"kubernetes.io/projected/bf0d00a9-64a4-46b9-9bc7-8617b0f3692a-kube-api-access-5gk2l\") pod \"redhat-marketplace-tnscg\" (UID: \"bf0d00a9-64a4-46b9-9bc7-8617b0f3692a\") " pod="openshift-marketplace/redhat-marketplace-tnscg" Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.155676 4838 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.156004 4838 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc"
Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.166686 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf0d00a9-64a4-46b9-9bc7-8617b0f3692a-utilities\") pod \"redhat-marketplace-tnscg\" (UID: \"bf0d00a9-64a4-46b9-9bc7-8617b0f3692a\") " pod="openshift-marketplace/redhat-marketplace-tnscg"
Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.166747 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf0d00a9-64a4-46b9-9bc7-8617b0f3692a-catalog-content\") pod \"redhat-marketplace-tnscg\" (UID: \"bf0d00a9-64a4-46b9-9bc7-8617b0f3692a\") " pod="openshift-marketplace/redhat-marketplace-tnscg"
Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.166838 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gk2l\" (UniqueName: \"kubernetes.io/projected/bf0d00a9-64a4-46b9-9bc7-8617b0f3692a-kube-api-access-5gk2l\") pod \"redhat-marketplace-tnscg\" (UID: \"bf0d00a9-64a4-46b9-9bc7-8617b0f3692a\") " pod="openshift-marketplace/redhat-marketplace-tnscg"
Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.167557 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf0d00a9-64a4-46b9-9bc7-8617b0f3692a-utilities\") pod \"redhat-marketplace-tnscg\" (UID: \"bf0d00a9-64a4-46b9-9bc7-8617b0f3692a\") " pod="openshift-marketplace/redhat-marketplace-tnscg"
Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.167925 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf0d00a9-64a4-46b9-9bc7-8617b0f3692a-catalog-content\") pod \"redhat-marketplace-tnscg\" (UID: \"bf0d00a9-64a4-46b9-9bc7-8617b0f3692a\") " pod="openshift-marketplace/redhat-marketplace-tnscg"
Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.189145 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gk2l\" (UniqueName: \"kubernetes.io/projected/bf0d00a9-64a4-46b9-9bc7-8617b0f3692a-kube-api-access-5gk2l\") pod \"redhat-marketplace-tnscg\" (UID: \"bf0d00a9-64a4-46b9-9bc7-8617b0f3692a\") " pod="openshift-marketplace/redhat-marketplace-tnscg"
Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.201819 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8b9cc\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc"
Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.277770 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc"
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.310556 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tnscg" Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.363806 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nqklr"] Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.364782 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nqklr" Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.372279 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nqklr"] Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.380777 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rbh6x" event={"ID":"21ad3b27-d69d-4db6-a1c6-fac312ad582d","Type":"ContainerStarted","Data":"119aa7b425e1a760f6b86ab52cb7c3033d1b4c6642e7373cf0a0243a4b530651"} Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.381973 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"3aa6843f14bd8bcc53efa308f8b6a4119d76fd609578ded663069c9e8cf77564"} Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.388492 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500485-q5vqj" event={"ID":"4fa1c8c4-4ea6-484c-906a-6e7c8016757b","Type":"ContainerDied","Data":"3f6839a6ef6de375728cbc5f1ed622c63055b46d91b766d2029f72a079a9aadc"} Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.388908 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f6839a6ef6de375728cbc5f1ed622c63055b46d91b766d2029f72a079a9aadc" Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.389056 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500485-q5vqj" Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.390731 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"bd3bcdf42fea37f19421ab7a04b43239719164fc81ca13effc0ac97706b80385"} Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.396529 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4f9j" event={"ID":"cdb7ade3-e1b6-436b-a5df-3abb972b72fa","Type":"ContainerStarted","Data":"bbb39d9aede0fdf1ec3e8965106b2d1d950461f5702c54ff1f8badf0f273aac3"} Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.415439 4838 generic.go:334] "Generic (PLEG): container finished" podID="d7b6d22c-5441-4f5c-830b-17d67446352d" containerID="ef5c2acb191b6d35c0620e7e5eb83d390271f42c47881fb33c65b1f6892a0c8c" exitCode=0 Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.415518 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ntc2n" event={"ID":"d7b6d22c-5441-4f5c-830b-17d67446352d","Type":"ContainerDied","Data":"ef5c2acb191b6d35c0620e7e5eb83d390271f42c47881fb33c65b1f6892a0c8c"} Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.415547 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ntc2n" event={"ID":"d7b6d22c-5441-4f5c-830b-17d67446352d","Type":"ContainerStarted","Data":"80492cab5db11167cdeca622da236853af59b6e2c7a2d93c3636c18a275831ba"} Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.417453 4838 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.419662 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"12060e63c5a40fb3110b5385ad5af78855a850e6d5a4e14722b5cfc8bfb4a7c8"} Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.452896 4838 generic.go:334] "Generic (PLEG): container finished" podID="da4cb631-2c1a-4acd-8e10-cf1fbdb099e1" containerID="8603439ecebe2c5e456e6831392539e1ebded39a9d0aa184011c7bfdc4c08abe" exitCode=0 Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.454381 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g2rlc" event={"ID":"da4cb631-2c1a-4acd-8e10-cf1fbdb099e1","Type":"ContainerDied","Data":"8603439ecebe2c5e456e6831392539e1ebded39a9d0aa184011c7bfdc4c08abe"} Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.454417 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g2rlc" event={"ID":"da4cb631-2c1a-4acd-8e10-cf1fbdb099e1","Type":"ContainerStarted","Data":"0118338384c840f8bec0723a20d4883f5207c5c7993adb147f2be73c28e2067d"} Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.467969 4838 patch_prober.go:28] interesting pod/router-default-5444994796-cqh2x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 10:55:56 crc kubenswrapper[4838]: [-]has-synced failed: reason withheld Feb 02 10:55:56 crc kubenswrapper[4838]: [+]process-running ok Feb 02 10:55:56 crc 
Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.468012 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cqh2x" podUID="f5e8635f-8114-4ba8-b74c-6dd667ef41df" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.470722 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/429f9b8c-ff40-4398-811b-01c8702b65b1-utilities\") pod \"redhat-marketplace-nqklr\" (UID: \"429f9b8c-ff40-4398-811b-01c8702b65b1\") " pod="openshift-marketplace/redhat-marketplace-nqklr"
Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.470748 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nprlb\" (UniqueName: \"kubernetes.io/projected/429f9b8c-ff40-4398-811b-01c8702b65b1-kube-api-access-nprlb\") pod \"redhat-marketplace-nqklr\" (UID: \"429f9b8c-ff40-4398-811b-01c8702b65b1\") " pod="openshift-marketplace/redhat-marketplace-nqklr"
Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.470773 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/429f9b8c-ff40-4398-811b-01c8702b65b1-catalog-content\") pod \"redhat-marketplace-nqklr\" (UID: \"429f9b8c-ff40-4398-811b-01c8702b65b1\") " pod="openshift-marketplace/redhat-marketplace-nqklr"
Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.527872 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes"
Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.572837 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/429f9b8c-ff40-4398-811b-01c8702b65b1-utilities\") pod \"redhat-marketplace-nqklr\" (UID: \"429f9b8c-ff40-4398-811b-01c8702b65b1\") " pod="openshift-marketplace/redhat-marketplace-nqklr"
Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.573177 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nprlb\" (UniqueName: \"kubernetes.io/projected/429f9b8c-ff40-4398-811b-01c8702b65b1-kube-api-access-nprlb\") pod \"redhat-marketplace-nqklr\" (UID: \"429f9b8c-ff40-4398-811b-01c8702b65b1\") " pod="openshift-marketplace/redhat-marketplace-nqklr"
Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.573212 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/429f9b8c-ff40-4398-811b-01c8702b65b1-catalog-content\") pod \"redhat-marketplace-nqklr\" (UID: \"429f9b8c-ff40-4398-811b-01c8702b65b1\") " pod="openshift-marketplace/redhat-marketplace-nqklr"
Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.576889 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/429f9b8c-ff40-4398-811b-01c8702b65b1-catalog-content\") pod \"redhat-marketplace-nqklr\" (UID: \"429f9b8c-ff40-4398-811b-01c8702b65b1\") " pod="openshift-marketplace/redhat-marketplace-nqklr"
Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.579499 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/429f9b8c-ff40-4398-811b-01c8702b65b1-utilities\") pod \"redhat-marketplace-nqklr\" (UID: \"429f9b8c-ff40-4398-811b-01c8702b65b1\") " pod="openshift-marketplace/redhat-marketplace-nqklr"
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/429f9b8c-ff40-4398-811b-01c8702b65b1-utilities\") pod \"redhat-marketplace-nqklr\" (UID: \"429f9b8c-ff40-4398-811b-01c8702b65b1\") " pod="openshift-marketplace/redhat-marketplace-nqklr" Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.592127 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nprlb\" (UniqueName: \"kubernetes.io/projected/429f9b8c-ff40-4398-811b-01c8702b65b1-kube-api-access-nprlb\") pod \"redhat-marketplace-nqklr\" (UID: \"429f9b8c-ff40-4398-811b-01c8702b65b1\") " pod="openshift-marketplace/redhat-marketplace-nqklr" Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.603809 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8b9cc"] Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.702958 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tnscg"] Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.706909 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nqklr" Feb 02 10:55:56 crc kubenswrapper[4838]: I0202 10:55:56.987797 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nqklr"] Feb 02 10:55:56 crc kubenswrapper[4838]: W0202 10:55:56.998215 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod429f9b8c_ff40_4398_811b_01c8702b65b1.slice/crio-7349a65de5361056c4d484831e83d02c940ee3c358535f0a737ced193e141d03 WatchSource:0}: Error finding container 7349a65de5361056c4d484831e83d02c940ee3c358535f0a737ced193e141d03: Status 404 returned error can't find the container with id 7349a65de5361056c4d484831e83d02c940ee3c358535f0a737ced193e141d03 Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.350445 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pl5pl"] Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.351601 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pl5pl" Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.353246 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.364410 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pl5pl"] Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.455731 4838 patch_prober.go:28] interesting pod/router-default-5444994796-cqh2x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 10:55:57 crc kubenswrapper[4838]: [-]has-synced failed: reason withheld Feb 02 10:55:57 crc kubenswrapper[4838]: [+]process-running ok Feb 02 10:55:57 crc kubenswrapper[4838]: healthz check failed Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.455775 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cqh2x" podUID="f5e8635f-8114-4ba8-b74c-6dd667ef41df" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.464916 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"36c941a4165e3f4a845232428958ff06d30fb53267ae57b1e3c4c4a74312a463"} Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.467757 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"fd16cc3d5bc0b0b58129b502c983c56c4732bb2ee856ff11a204b682d57e18e6"} Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.467844 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.470678 4838 generic.go:334] "Generic (PLEG): container finished" podID="cdb7ade3-e1b6-436b-a5df-3abb972b72fa" containerID="64440857b2f78a077748d02ca562e0b1d843cf42acb620d2c48b15396ebe0672" exitCode=0 Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.470736 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4f9j" event={"ID":"cdb7ade3-e1b6-436b-a5df-3abb972b72fa","Type":"ContainerDied","Data":"64440857b2f78a077748d02ca562e0b1d843cf42acb620d2c48b15396ebe0672"} Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.473727 4838 generic.go:334] "Generic (PLEG): container finished" podID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" containerID="64f1fdb1daae58afc50ff16c35dd52e00b236820a98593f719611df13be07151" exitCode=0 Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.473764 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tnscg" event={"ID":"bf0d00a9-64a4-46b9-9bc7-8617b0f3692a","Type":"ContainerDied","Data":"64f1fdb1daae58afc50ff16c35dd52e00b236820a98593f719611df13be07151"} Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.473781 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tnscg" 
event={"ID":"bf0d00a9-64a4-46b9-9bc7-8617b0f3692a","Type":"ContainerStarted","Data":"f122af82990d359563cf6cb5a392a9a1d5bf3f3456bb88e8a521523164e10e4b"} Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.476893 4838 generic.go:334] "Generic (PLEG): container finished" podID="429f9b8c-ff40-4398-811b-01c8702b65b1" containerID="aa6d71a7740a3354d2592a86b8ae8b11e260c3c02b877f7ca362b96f77e89f7f" exitCode=0 Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.476996 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqklr" event={"ID":"429f9b8c-ff40-4398-811b-01c8702b65b1","Type":"ContainerDied","Data":"aa6d71a7740a3354d2592a86b8ae8b11e260c3c02b877f7ca362b96f77e89f7f"} Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.477035 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqklr" event={"ID":"429f9b8c-ff40-4398-811b-01c8702b65b1","Type":"ContainerStarted","Data":"7349a65de5361056c4d484831e83d02c940ee3c358535f0a737ced193e141d03"} Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.482912 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"e5ed8c6a652b9e9433188dab5a84d9f97baaba090c8ffeba048141df165d0ad4"} Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.485223 4838 generic.go:334] "Generic (PLEG): container finished" podID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" containerID="a6729b9ce4581175317d8877173a9d3924e8676fec1c27767d4bb918351af12c" exitCode=0 Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.485316 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rbh6x" event={"ID":"21ad3b27-d69d-4db6-a1c6-fac312ad582d","Type":"ContainerDied","Data":"a6729b9ce4581175317d8877173a9d3924e8676fec1c27767d4bb918351af12c"} Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.495990 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-487wm\" (UniqueName: \"kubernetes.io/projected/193925e0-1419-444b-9e75-ed7371081181-kube-api-access-487wm\") pod \"redhat-operators-pl5pl\" (UID: \"193925e0-1419-444b-9e75-ed7371081181\") " pod="openshift-marketplace/redhat-operators-pl5pl" Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.496074 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/193925e0-1419-444b-9e75-ed7371081181-utilities\") pod \"redhat-operators-pl5pl\" (UID: \"193925e0-1419-444b-9e75-ed7371081181\") " pod="openshift-marketplace/redhat-operators-pl5pl" Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.496102 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/193925e0-1419-444b-9e75-ed7371081181-catalog-content\") pod \"redhat-operators-pl5pl\" (UID: \"193925e0-1419-444b-9e75-ed7371081181\") " pod="openshift-marketplace/redhat-operators-pl5pl" Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.498480 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" event={"ID":"0adb948a-923d-44f9-8cad-f36fe04a90b2","Type":"ContainerStarted","Data":"a65f736fe5dcc893b94052721613ade53a8256e937aa92d8a7633f858161061c"} Feb 02 10:55:57 
Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.498564 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc"
Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.566798 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" podStartSLOduration=127.566777821 podStartE2EDuration="2m7.566777821s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:57.553133111 +0000 UTC m=+151.890234139" watchObservedRunningTime="2026-02-02 10:55:57.566777821 +0000 UTC m=+151.903878849"
Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.596771 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/193925e0-1419-444b-9e75-ed7371081181-utilities\") pod \"redhat-operators-pl5pl\" (UID: \"193925e0-1419-444b-9e75-ed7371081181\") " pod="openshift-marketplace/redhat-operators-pl5pl"
Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.597046 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/193925e0-1419-444b-9e75-ed7371081181-catalog-content\") pod \"redhat-operators-pl5pl\" (UID: \"193925e0-1419-444b-9e75-ed7371081181\") " pod="openshift-marketplace/redhat-operators-pl5pl"
Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.597157 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-487wm\" (UniqueName: \"kubernetes.io/projected/193925e0-1419-444b-9e75-ed7371081181-kube-api-access-487wm\") pod \"redhat-operators-pl5pl\" (UID: \"193925e0-1419-444b-9e75-ed7371081181\") " pod="openshift-marketplace/redhat-operators-pl5pl"
Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.598573 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/193925e0-1419-444b-9e75-ed7371081181-utilities\") pod \"redhat-operators-pl5pl\" (UID: \"193925e0-1419-444b-9e75-ed7371081181\") " pod="openshift-marketplace/redhat-operators-pl5pl"
Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.599146 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/193925e0-1419-444b-9e75-ed7371081181-catalog-content\") pod \"redhat-operators-pl5pl\" (UID: \"193925e0-1419-444b-9e75-ed7371081181\") " pod="openshift-marketplace/redhat-operators-pl5pl"
Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.636351 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-487wm\" (UniqueName: \"kubernetes.io/projected/193925e0-1419-444b-9e75-ed7371081181-kube-api-access-487wm\") pod \"redhat-operators-pl5pl\" (UID: \"193925e0-1419-444b-9e75-ed7371081181\") " pod="openshift-marketplace/redhat-operators-pl5pl"
Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.702856 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pl5pl"
Need to start a new one" pod="openshift-marketplace/redhat-operators-pl5pl" Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.753805 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qwkhj"] Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.755005 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qwkhj" Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.764706 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qwkhj"] Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.901142 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwqfm\" (UniqueName: \"kubernetes.io/projected/f98a7f3b-5730-4469-aef3-188a9755f566-kube-api-access-wwqfm\") pod \"redhat-operators-qwkhj\" (UID: \"f98a7f3b-5730-4469-aef3-188a9755f566\") " pod="openshift-marketplace/redhat-operators-qwkhj" Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.901211 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f98a7f3b-5730-4469-aef3-188a9755f566-utilities\") pod \"redhat-operators-qwkhj\" (UID: \"f98a7f3b-5730-4469-aef3-188a9755f566\") " pod="openshift-marketplace/redhat-operators-qwkhj" Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.901237 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f98a7f3b-5730-4469-aef3-188a9755f566-catalog-content\") pod \"redhat-operators-qwkhj\" (UID: \"f98a7f3b-5730-4469-aef3-188a9755f566\") " pod="openshift-marketplace/redhat-operators-qwkhj" Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.901688 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.902436 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.904455 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.905064 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.905954 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Feb 02 10:55:57 crc kubenswrapper[4838]: I0202 10:55:57.948360 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pl5pl"] Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.002478 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwqfm\" (UniqueName: \"kubernetes.io/projected/f98a7f3b-5730-4469-aef3-188a9755f566-kube-api-access-wwqfm\") pod \"redhat-operators-qwkhj\" (UID: \"f98a7f3b-5730-4469-aef3-188a9755f566\") " pod="openshift-marketplace/redhat-operators-qwkhj" Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.002549 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f98a7f3b-5730-4469-aef3-188a9755f566-utilities\") pod \"redhat-operators-qwkhj\" (UID: \"f98a7f3b-5730-4469-aef3-188a9755f566\") " pod="openshift-marketplace/redhat-operators-qwkhj" Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.002572 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f98a7f3b-5730-4469-aef3-188a9755f566-catalog-content\") pod \"redhat-operators-qwkhj\" (UID: \"f98a7f3b-5730-4469-aef3-188a9755f566\") " pod="openshift-marketplace/redhat-operators-qwkhj" Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.002605 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e99ef772-a76d-441a-a2f9-32a035023dba-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e99ef772-a76d-441a-a2f9-32a035023dba\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.002641 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e99ef772-a76d-441a-a2f9-32a035023dba-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e99ef772-a76d-441a-a2f9-32a035023dba\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.003428 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f98a7f3b-5730-4469-aef3-188a9755f566-catalog-content\") pod \"redhat-operators-qwkhj\" (UID: \"f98a7f3b-5730-4469-aef3-188a9755f566\") " pod="openshift-marketplace/redhat-operators-qwkhj" Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.004188 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f98a7f3b-5730-4469-aef3-188a9755f566-utilities\") pod \"redhat-operators-qwkhj\" (UID: \"f98a7f3b-5730-4469-aef3-188a9755f566\") " 
pod="openshift-marketplace/redhat-operators-qwkhj" Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.018972 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwqfm\" (UniqueName: \"kubernetes.io/projected/f98a7f3b-5730-4469-aef3-188a9755f566-kube-api-access-wwqfm\") pod \"redhat-operators-qwkhj\" (UID: \"f98a7f3b-5730-4469-aef3-188a9755f566\") " pod="openshift-marketplace/redhat-operators-qwkhj" Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.098115 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qwkhj" Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.103853 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e99ef772-a76d-441a-a2f9-32a035023dba-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e99ef772-a76d-441a-a2f9-32a035023dba\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.103890 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e99ef772-a76d-441a-a2f9-32a035023dba-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e99ef772-a76d-441a-a2f9-32a035023dba\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.104076 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e99ef772-a76d-441a-a2f9-32a035023dba-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e99ef772-a76d-441a-a2f9-32a035023dba\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.118991 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e99ef772-a76d-441a-a2f9-32a035023dba-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e99ef772-a76d-441a-a2f9-32a035023dba\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.224600 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.349965 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qwkhj"] Feb 02 10:55:58 crc kubenswrapper[4838]: W0202 10:55:58.375246 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf98a7f3b_5730_4469_aef3_188a9755f566.slice/crio-f75e5c9f8c68500780b40c97a17cd8e820d1a5817e9523454a5b83ea87c937d6 WatchSource:0}: Error finding container f75e5c9f8c68500780b40c97a17cd8e820d1a5817e9523454a5b83ea87c937d6: Status 404 returned error can't find the container with id f75e5c9f8c68500780b40c97a17cd8e820d1a5817e9523454a5b83ea87c937d6 Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.456136 4838 patch_prober.go:28] interesting pod/router-default-5444994796-cqh2x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 10:55:58 crc kubenswrapper[4838]: [-]has-synced failed: reason withheld Feb 02 10:55:58 crc kubenswrapper[4838]: [+]process-running ok Feb 02 10:55:58 crc kubenswrapper[4838]: healthz check failed Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.456219 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cqh2x" podUID="f5e8635f-8114-4ba8-b74c-6dd667ef41df" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.460520 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.463323 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.506848 4838 generic.go:334] "Generic (PLEG): container finished" podID="193925e0-1419-444b-9e75-ed7371081181" containerID="ab6cac03f617f2fdb0b4417bb05817eee8926f6fd951af91f6ce45b4e4aeca88" exitCode=0 Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.516356 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pl5pl" event={"ID":"193925e0-1419-444b-9e75-ed7371081181","Type":"ContainerDied","Data":"ab6cac03f617f2fdb0b4417bb05817eee8926f6fd951af91f6ce45b4e4aeca88"} Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.516391 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pl5pl" event={"ID":"193925e0-1419-444b-9e75-ed7371081181","Type":"ContainerStarted","Data":"cb0fd35d05f1c52119d9e23e4cfa190b26a8d7fdb76b2626e610cd8ddcb3611a"} Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.516403 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qwkhj" event={"ID":"f98a7f3b-5730-4469-aef3-188a9755f566","Type":"ContainerStarted","Data":"f75e5c9f8c68500780b40c97a17cd8e820d1a5817e9523454a5b83ea87c937d6"} Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.990808 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.991112 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.997574 4838 patch_prober.go:28] interesting pod/console-f9d7485db-2qsm5 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.11:8443/health\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body= Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.997643 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-2qsm5" podUID="3e879c3f-8c95-449b-b9e7-439c78f48209" containerName="console" probeResult="failure" output="Get \"https://10.217.0.11:8443/health\": dial tcp 10.217.0.11:8443: connect: connection refused" Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.997911 4838 patch_prober.go:28] interesting pod/downloads-7954f5f757-mzp9s container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.997972 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-mzp9s" podUID="ed8b29bf-e322-4cc9-b027-0aea680ce349" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.998143 4838 patch_prober.go:28] interesting pod/downloads-7954f5f757-mzp9s container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Feb 02 10:55:58 crc kubenswrapper[4838]: I0202 10:55:58.998174 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-mzp9s" podUID="ed8b29bf-e322-4cc9-b027-0aea680ce349" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Feb 02 10:55:59 crc kubenswrapper[4838]: I0202 10:55:59.014437 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:59 crc kubenswrapper[4838]: I0202 10:55:59.018318 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-zfr2j" Feb 02 10:55:59 crc kubenswrapper[4838]: I0202 10:55:59.452165 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-cqh2x" Feb 02 10:55:59 crc kubenswrapper[4838]: I0202 10:55:59.457840 4838 patch_prober.go:28] interesting pod/router-default-5444994796-cqh2x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 10:55:59 crc kubenswrapper[4838]: [-]has-synced failed: reason withheld Feb 02 10:55:59 crc kubenswrapper[4838]: [+]process-running ok Feb 02 10:55:59 crc kubenswrapper[4838]: healthz check failed Feb 02 10:55:59 crc kubenswrapper[4838]: I0202 10:55:59.457899 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cqh2x" podUID="f5e8635f-8114-4ba8-b74c-6dd667ef41df" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 10:55:59 
crc kubenswrapper[4838]: I0202 10:55:59.538542 4838 generic.go:334] "Generic (PLEG): container finished" podID="f98a7f3b-5730-4469-aef3-188a9755f566" containerID="457e340060e298a95c675e0ea2a8427004bc52f326921279a81f22731d8ea978" exitCode=0 Feb 02 10:55:59 crc kubenswrapper[4838]: I0202 10:55:59.538709 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qwkhj" event={"ID":"f98a7f3b-5730-4469-aef3-188a9755f566","Type":"ContainerDied","Data":"457e340060e298a95c675e0ea2a8427004bc52f326921279a81f22731d8ea978"} Feb 02 10:55:59 crc kubenswrapper[4838]: I0202 10:55:59.542956 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"e99ef772-a76d-441a-a2f9-32a035023dba","Type":"ContainerStarted","Data":"005f07fd21e9054fe9d9b69d33fd78e53ff96c8241257a9ce30972e8880b9dfb"} Feb 02 10:55:59 crc kubenswrapper[4838]: I0202 10:55:59.543098 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"e99ef772-a76d-441a-a2f9-32a035023dba","Type":"ContainerStarted","Data":"822be6cca4305fe5e871b3ceed6563eb148d495c9eb1287b854226b0e7677ab0"} Feb 02 10:55:59 crc kubenswrapper[4838]: I0202 10:55:59.584888 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.584861931 podStartE2EDuration="2.584861931s" podCreationTimestamp="2026-02-02 10:55:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:55:59.577187146 +0000 UTC m=+153.914288174" watchObservedRunningTime="2026-02-02 10:55:59.584861931 +0000 UTC m=+153.921962959" Feb 02 10:56:00 crc kubenswrapper[4838]: I0202 10:56:00.455769 4838 patch_prober.go:28] interesting pod/router-default-5444994796-cqh2x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 10:56:00 crc kubenswrapper[4838]: [-]has-synced failed: reason withheld Feb 02 10:56:00 crc kubenswrapper[4838]: [+]process-running ok Feb 02 10:56:00 crc kubenswrapper[4838]: healthz check failed Feb 02 10:56:00 crc kubenswrapper[4838]: I0202 10:56:00.456412 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cqh2x" podUID="f5e8635f-8114-4ba8-b74c-6dd667ef41df" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 10:56:00 crc kubenswrapper[4838]: I0202 10:56:00.562771 4838 generic.go:334] "Generic (PLEG): container finished" podID="e99ef772-a76d-441a-a2f9-32a035023dba" containerID="005f07fd21e9054fe9d9b69d33fd78e53ff96c8241257a9ce30972e8880b9dfb" exitCode=0 Feb 02 10:56:00 crc kubenswrapper[4838]: I0202 10:56:00.562821 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"e99ef772-a76d-441a-a2f9-32a035023dba","Type":"ContainerDied","Data":"005f07fd21e9054fe9d9b69d33fd78e53ff96c8241257a9ce30972e8880b9dfb"} Feb 02 10:56:01 crc kubenswrapper[4838]: I0202 10:56:01.342890 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-sz4zx" Feb 02 10:56:01 crc kubenswrapper[4838]: I0202 10:56:01.453413 4838 patch_prober.go:28] interesting pod/router-default-5444994796-cqh2x container/router 
namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 10:56:01 crc kubenswrapper[4838]: [-]has-synced failed: reason withheld Feb 02 10:56:01 crc kubenswrapper[4838]: [+]process-running ok Feb 02 10:56:01 crc kubenswrapper[4838]: healthz check failed Feb 02 10:56:01 crc kubenswrapper[4838]: I0202 10:56:01.453461 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cqh2x" podUID="f5e8635f-8114-4ba8-b74c-6dd667ef41df" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 10:56:01 crc kubenswrapper[4838]: I0202 10:56:01.660315 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Feb 02 10:56:01 crc kubenswrapper[4838]: I0202 10:56:01.660923 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 10:56:01 crc kubenswrapper[4838]: I0202 10:56:01.664119 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Feb 02 10:56:01 crc kubenswrapper[4838]: I0202 10:56:01.664301 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Feb 02 10:56:01 crc kubenswrapper[4838]: I0202 10:56:01.680789 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Feb 02 10:56:01 crc kubenswrapper[4838]: I0202 10:56:01.771127 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/86017b1b-8411-4ee3-bffd-d71c26b535fb-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"86017b1b-8411-4ee3-bffd-d71c26b535fb\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 10:56:01 crc kubenswrapper[4838]: I0202 10:56:01.771937 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/86017b1b-8411-4ee3-bffd-d71c26b535fb-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"86017b1b-8411-4ee3-bffd-d71c26b535fb\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 10:56:01 crc kubenswrapper[4838]: I0202 10:56:01.872986 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/86017b1b-8411-4ee3-bffd-d71c26b535fb-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"86017b1b-8411-4ee3-bffd-d71c26b535fb\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 10:56:01 crc kubenswrapper[4838]: I0202 10:56:01.873074 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/86017b1b-8411-4ee3-bffd-d71c26b535fb-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"86017b1b-8411-4ee3-bffd-d71c26b535fb\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 10:56:01 crc kubenswrapper[4838]: I0202 10:56:01.873932 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/86017b1b-8411-4ee3-bffd-d71c26b535fb-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"86017b1b-8411-4ee3-bffd-d71c26b535fb\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 
10:56:01 crc kubenswrapper[4838]: I0202 10:56:01.911845 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/86017b1b-8411-4ee3-bffd-d71c26b535fb-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"86017b1b-8411-4ee3-bffd-d71c26b535fb\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 10:56:01 crc kubenswrapper[4838]: I0202 10:56:01.991189 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 10:56:02 crc kubenswrapper[4838]: I0202 10:56:02.455661 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-cqh2x" Feb 02 10:56:02 crc kubenswrapper[4838]: I0202 10:56:02.464094 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-cqh2x" Feb 02 10:56:08 crc kubenswrapper[4838]: I0202 10:56:08.997188 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:56:08 crc kubenswrapper[4838]: I0202 10:56:08.998759 4838 patch_prober.go:28] interesting pod/downloads-7954f5f757-mzp9s container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Feb 02 10:56:08 crc kubenswrapper[4838]: I0202 10:56:08.998823 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-mzp9s" podUID="ed8b29bf-e322-4cc9-b027-0aea680ce349" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Feb 02 10:56:08 crc kubenswrapper[4838]: I0202 10:56:08.999796 4838 patch_prober.go:28] interesting pod/downloads-7954f5f757-mzp9s container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Feb 02 10:56:08 crc kubenswrapper[4838]: I0202 10:56:08.999863 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-mzp9s" podUID="ed8b29bf-e322-4cc9-b027-0aea680ce349" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Feb 02 10:56:09 crc kubenswrapper[4838]: I0202 10:56:09.004962 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 10:56:13 crc kubenswrapper[4838]: I0202 10:56:13.071697 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 10:56:13 crc kubenswrapper[4838]: I0202 10:56:13.136544 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e99ef772-a76d-441a-a2f9-32a035023dba-kubelet-dir\") pod \"e99ef772-a76d-441a-a2f9-32a035023dba\" (UID: \"e99ef772-a76d-441a-a2f9-32a035023dba\") " Feb 02 10:56:13 crc kubenswrapper[4838]: I0202 10:56:13.136695 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e99ef772-a76d-441a-a2f9-32a035023dba-kube-api-access\") pod \"e99ef772-a76d-441a-a2f9-32a035023dba\" (UID: \"e99ef772-a76d-441a-a2f9-32a035023dba\") " Feb 02 10:56:13 crc kubenswrapper[4838]: I0202 10:56:13.136743 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e99ef772-a76d-441a-a2f9-32a035023dba-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "e99ef772-a76d-441a-a2f9-32a035023dba" (UID: "e99ef772-a76d-441a-a2f9-32a035023dba"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 10:56:13 crc kubenswrapper[4838]: I0202 10:56:13.137041 4838 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e99ef772-a76d-441a-a2f9-32a035023dba-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 02 10:56:13 crc kubenswrapper[4838]: I0202 10:56:13.149922 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e99ef772-a76d-441a-a2f9-32a035023dba-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e99ef772-a76d-441a-a2f9-32a035023dba" (UID: "e99ef772-a76d-441a-a2f9-32a035023dba"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:56:13 crc kubenswrapper[4838]: I0202 10:56:13.238898 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e99ef772-a76d-441a-a2f9-32a035023dba-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 10:56:13 crc kubenswrapper[4838]: I0202 10:56:13.542209 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs\") pod \"network-metrics-daemon-kdnnp\" (UID: \"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\") " pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:56:13 crc kubenswrapper[4838]: I0202 10:56:13.549511 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba-metrics-certs\") pod \"network-metrics-daemon-kdnnp\" (UID: \"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba\") " pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:56:13 crc kubenswrapper[4838]: I0202 10:56:13.564764 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-w48jn"] Feb 02 10:56:13 crc kubenswrapper[4838]: I0202 10:56:13.565069 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" podUID="db54cce9-ff9d-4772-abf3-01f15ecb8075" containerName="controller-manager" containerID="cri-o://37b6c8c5c9804f2ddcfcf2a242353706afa270f96a916bdf65e4253961bc63cb" gracePeriod=30 Feb 02 10:56:13 crc kubenswrapper[4838]: I0202 10:56:13.590715 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj"] Feb 02 10:56:13 crc kubenswrapper[4838]: I0202 10:56:13.590951 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" podUID="92924f81-e588-47e1-84d1-766c9774f6d1" containerName="route-controller-manager" containerID="cri-o://ff4739a8a737120517fdd5fdfee34e7b4f5b672dd3fa24c74ac417bdb3ea9476" gracePeriod=30 Feb 02 10:56:13 crc kubenswrapper[4838]: I0202 10:56:13.683675 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 10:56:13 crc kubenswrapper[4838]: I0202 10:56:13.683602 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"e99ef772-a76d-441a-a2f9-32a035023dba","Type":"ContainerDied","Data":"822be6cca4305fe5e871b3ceed6563eb148d495c9eb1287b854226b0e7677ab0"} Feb 02 10:56:13 crc kubenswrapper[4838]: I0202 10:56:13.683720 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="822be6cca4305fe5e871b3ceed6563eb148d495c9eb1287b854226b0e7677ab0" Feb 02 10:56:13 crc kubenswrapper[4838]: I0202 10:56:13.831201 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-kdnnp" Feb 02 10:56:14 crc kubenswrapper[4838]: I0202 10:56:14.691417 4838 generic.go:334] "Generic (PLEG): container finished" podID="db54cce9-ff9d-4772-abf3-01f15ecb8075" containerID="37b6c8c5c9804f2ddcfcf2a242353706afa270f96a916bdf65e4253961bc63cb" exitCode=0 Feb 02 10:56:14 crc kubenswrapper[4838]: I0202 10:56:14.691522 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" event={"ID":"db54cce9-ff9d-4772-abf3-01f15ecb8075","Type":"ContainerDied","Data":"37b6c8c5c9804f2ddcfcf2a242353706afa270f96a916bdf65e4253961bc63cb"} Feb 02 10:56:14 crc kubenswrapper[4838]: I0202 10:56:14.693171 4838 generic.go:334] "Generic (PLEG): container finished" podID="92924f81-e588-47e1-84d1-766c9774f6d1" containerID="ff4739a8a737120517fdd5fdfee34e7b4f5b672dd3fa24c74ac417bdb3ea9476" exitCode=0 Feb 02 10:56:14 crc kubenswrapper[4838]: I0202 10:56:14.693235 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" event={"ID":"92924f81-e588-47e1-84d1-766c9774f6d1","Type":"ContainerDied","Data":"ff4739a8a737120517fdd5fdfee34e7b4f5b672dd3fa24c74ac417bdb3ea9476"} Feb 02 10:56:15 crc kubenswrapper[4838]: I0202 10:56:15.430210 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 10:56:15 crc kubenswrapper[4838]: I0202 10:56:15.430279 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 10:56:16 crc kubenswrapper[4838]: I0202 10:56:16.283428 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 10:56:18 crc kubenswrapper[4838]: I0202 10:56:18.324602 4838 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-w48jn container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Feb 02 10:56:18 crc kubenswrapper[4838]: I0202 10:56:18.324988 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" podUID="db54cce9-ff9d-4772-abf3-01f15ecb8075" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" Feb 02 10:56:18 crc kubenswrapper[4838]: I0202 10:56:18.455417 4838 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-z6khj container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Feb 02 10:56:18 crc kubenswrapper[4838]: I0202 10:56:18.455476 4838 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" podUID="92924f81-e588-47e1-84d1-766c9774f6d1" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Feb 02 10:56:19 crc kubenswrapper[4838]: I0202 10:56:19.020520 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-mzp9s" Feb 02 10:56:28 crc kubenswrapper[4838]: I0202 10:56:28.456079 4838 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-z6khj container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Feb 02 10:56:28 crc kubenswrapper[4838]: I0202 10:56:28.456810 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" podUID="92924f81-e588-47e1-84d1-766c9774f6d1" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Feb 02 10:56:29 crc kubenswrapper[4838]: I0202 10:56:29.324736 4838 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-w48jn container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Feb 02 10:56:29 crc kubenswrapper[4838]: I0202 10:56:29.324879 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" podUID="db54cce9-ff9d-4772-abf3-01f15ecb8075" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Feb 02 10:56:29 crc kubenswrapper[4838]: I0202 10:56:29.506480 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7bcl9" Feb 02 10:56:31 crc kubenswrapper[4838]: E0202 10:56:31.563739 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Feb 02 10:56:31 crc kubenswrapper[4838]: E0202 10:56:31.564111 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-h4fv8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-m4f9j_openshift-marketplace(cdb7ade3-e1b6-436b-a5df-3abb972b72fa): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 10:56:31 crc kubenswrapper[4838]: E0202 10:56:31.565372 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-m4f9j" podUID="cdb7ade3-e1b6-436b-a5df-3abb972b72fa" Feb 02 10:56:34 crc kubenswrapper[4838]: E0202 10:56:34.275336 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-m4f9j" podUID="cdb7ade3-e1b6-436b-a5df-3abb972b72fa" Feb 02 10:56:35 crc kubenswrapper[4838]: I0202 10:56:35.676353 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Feb 02 10:56:35 crc kubenswrapper[4838]: E0202 10:56:35.676745 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e99ef772-a76d-441a-a2f9-32a035023dba" containerName="pruner" Feb 02 10:56:35 crc kubenswrapper[4838]: I0202 10:56:35.676762 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="e99ef772-a76d-441a-a2f9-32a035023dba" containerName="pruner" Feb 02 10:56:35 crc kubenswrapper[4838]: I0202 10:56:35.676939 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="e99ef772-a76d-441a-a2f9-32a035023dba" containerName="pruner" Feb 02 10:56:35 crc kubenswrapper[4838]: I0202 10:56:35.677508 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 10:56:35 crc kubenswrapper[4838]: I0202 10:56:35.682942 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Feb 02 10:56:35 crc kubenswrapper[4838]: I0202 10:56:35.793217 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c5a4810d-7433-4fe3-a231-a9ba2a238ce1-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c5a4810d-7433-4fe3-a231-a9ba2a238ce1\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 10:56:35 crc kubenswrapper[4838]: I0202 10:56:35.793291 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c5a4810d-7433-4fe3-a231-a9ba2a238ce1-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c5a4810d-7433-4fe3-a231-a9ba2a238ce1\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 10:56:35 crc kubenswrapper[4838]: I0202 10:56:35.894392 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c5a4810d-7433-4fe3-a231-a9ba2a238ce1-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c5a4810d-7433-4fe3-a231-a9ba2a238ce1\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 10:56:35 crc kubenswrapper[4838]: I0202 10:56:35.894447 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c5a4810d-7433-4fe3-a231-a9ba2a238ce1-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c5a4810d-7433-4fe3-a231-a9ba2a238ce1\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 10:56:35 crc kubenswrapper[4838]: I0202 10:56:35.894521 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c5a4810d-7433-4fe3-a231-a9ba2a238ce1-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c5a4810d-7433-4fe3-a231-a9ba2a238ce1\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 10:56:35 crc kubenswrapper[4838]: I0202 10:56:35.917636 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c5a4810d-7433-4fe3-a231-a9ba2a238ce1-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c5a4810d-7433-4fe3-a231-a9ba2a238ce1\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 10:56:36 crc kubenswrapper[4838]: I0202 10:56:36.015910 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 10:56:36 crc kubenswrapper[4838]: I0202 10:56:36.044076 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 10:56:37 crc kubenswrapper[4838]: E0202 10:56:37.769725 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Feb 02 10:56:37 crc kubenswrapper[4838]: E0202 10:56:37.770333 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-l6sqx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-ntc2n_openshift-marketplace(d7b6d22c-5441-4f5c-830b-17d67446352d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 10:56:37 crc kubenswrapper[4838]: E0202 10:56:37.771946 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-ntc2n" podUID="d7b6d22c-5441-4f5c-830b-17d67446352d" Feb 02 10:56:38 crc kubenswrapper[4838]: E0202 10:56:38.353884 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-ntc2n" podUID="d7b6d22c-5441-4f5c-830b-17d67446352d" Feb 02 10:56:38 crc kubenswrapper[4838]: I0202 10:56:38.461629 4838 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-z6khj container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe 
status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Feb 02 10:56:38 crc kubenswrapper[4838]: I0202 10:56:38.461752 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" podUID="92924f81-e588-47e1-84d1-766c9774f6d1" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Feb 02 10:56:39 crc kubenswrapper[4838]: I0202 10:56:39.324725 4838 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-w48jn container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Feb 02 10:56:39 crc kubenswrapper[4838]: I0202 10:56:39.325219 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" podUID="db54cce9-ff9d-4772-abf3-01f15ecb8075" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Feb 02 10:56:39 crc kubenswrapper[4838]: E0202 10:56:39.397813 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Feb 02 10:56:39 crc kubenswrapper[4838]: E0202 10:56:39.398445 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nprlb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-nqklr_openshift-marketplace(429f9b8c-ff40-4398-811b-01c8702b65b1): ErrImagePull: rpc error: code = Canceled desc = copying system image from 
manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 10:56:39 crc kubenswrapper[4838]: E0202 10:56:39.399710 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-nqklr" podUID="429f9b8c-ff40-4398-811b-01c8702b65b1" Feb 02 10:56:41 crc kubenswrapper[4838]: I0202 10:56:41.258316 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 02 10:56:41 crc kubenswrapper[4838]: I0202 10:56:41.259272 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 02 10:56:41 crc kubenswrapper[4838]: I0202 10:56:41.269314 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 02 10:56:41 crc kubenswrapper[4838]: I0202 10:56:41.283549 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/2a16c769-012d-4a30-b9db-5629ed018ef8-var-lock\") pod \"installer-9-crc\" (UID: \"2a16c769-012d-4a30-b9db-5629ed018ef8\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 10:56:41 crc kubenswrapper[4838]: I0202 10:56:41.283675 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2a16c769-012d-4a30-b9db-5629ed018ef8-kube-api-access\") pod \"installer-9-crc\" (UID: \"2a16c769-012d-4a30-b9db-5629ed018ef8\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 10:56:41 crc kubenswrapper[4838]: I0202 10:56:41.283749 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2a16c769-012d-4a30-b9db-5629ed018ef8-kubelet-dir\") pod \"installer-9-crc\" (UID: \"2a16c769-012d-4a30-b9db-5629ed018ef8\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 10:56:41 crc kubenswrapper[4838]: I0202 10:56:41.385483 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/2a16c769-012d-4a30-b9db-5629ed018ef8-var-lock\") pod \"installer-9-crc\" (UID: \"2a16c769-012d-4a30-b9db-5629ed018ef8\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 10:56:41 crc kubenswrapper[4838]: I0202 10:56:41.385538 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2a16c769-012d-4a30-b9db-5629ed018ef8-kube-api-access\") pod \"installer-9-crc\" (UID: \"2a16c769-012d-4a30-b9db-5629ed018ef8\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 10:56:41 crc kubenswrapper[4838]: I0202 10:56:41.385578 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2a16c769-012d-4a30-b9db-5629ed018ef8-kubelet-dir\") pod \"installer-9-crc\" (UID: \"2a16c769-012d-4a30-b9db-5629ed018ef8\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 10:56:41 crc kubenswrapper[4838]: I0202 10:56:41.385669 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/2a16c769-012d-4a30-b9db-5629ed018ef8-var-lock\") pod \"installer-9-crc\" 
(UID: \"2a16c769-012d-4a30-b9db-5629ed018ef8\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 10:56:41 crc kubenswrapper[4838]: I0202 10:56:41.385689 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2a16c769-012d-4a30-b9db-5629ed018ef8-kubelet-dir\") pod \"installer-9-crc\" (UID: \"2a16c769-012d-4a30-b9db-5629ed018ef8\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 10:56:41 crc kubenswrapper[4838]: I0202 10:56:41.411054 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2a16c769-012d-4a30-b9db-5629ed018ef8-kube-api-access\") pod \"installer-9-crc\" (UID: \"2a16c769-012d-4a30-b9db-5629ed018ef8\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 10:56:41 crc kubenswrapper[4838]: I0202 10:56:41.598423 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 02 10:56:44 crc kubenswrapper[4838]: E0202 10:56:44.398569 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-nqklr" podUID="429f9b8c-ff40-4398-811b-01c8702b65b1" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.529551 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.541549 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.580532 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-845fd9f844-btlt9"] Feb 02 10:56:44 crc kubenswrapper[4838]: E0202 10:56:44.580837 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db54cce9-ff9d-4772-abf3-01f15ecb8075" containerName="controller-manager" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.580850 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="db54cce9-ff9d-4772-abf3-01f15ecb8075" containerName="controller-manager" Feb 02 10:56:44 crc kubenswrapper[4838]: E0202 10:56:44.580863 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92924f81-e588-47e1-84d1-766c9774f6d1" containerName="route-controller-manager" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.580870 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="92924f81-e588-47e1-84d1-766c9774f6d1" containerName="route-controller-manager" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.580987 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="92924f81-e588-47e1-84d1-766c9774f6d1" containerName="route-controller-manager" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.580996 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="db54cce9-ff9d-4772-abf3-01f15ecb8075" containerName="controller-manager" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.581317 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-845fd9f844-btlt9"] Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.581430 4838 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.631876 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/92924f81-e588-47e1-84d1-766c9774f6d1-serving-cert\") pod \"92924f81-e588-47e1-84d1-766c9774f6d1\" (UID: \"92924f81-e588-47e1-84d1-766c9774f6d1\") " Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.631927 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92924f81-e588-47e1-84d1-766c9774f6d1-config\") pod \"92924f81-e588-47e1-84d1-766c9774f6d1\" (UID: \"92924f81-e588-47e1-84d1-766c9774f6d1\") " Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.631954 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/92924f81-e588-47e1-84d1-766c9774f6d1-client-ca\") pod \"92924f81-e588-47e1-84d1-766c9774f6d1\" (UID: \"92924f81-e588-47e1-84d1-766c9774f6d1\") " Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.632059 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ssfzm\" (UniqueName: \"kubernetes.io/projected/db54cce9-ff9d-4772-abf3-01f15ecb8075-kube-api-access-ssfzm\") pod \"db54cce9-ff9d-4772-abf3-01f15ecb8075\" (UID: \"db54cce9-ff9d-4772-abf3-01f15ecb8075\") " Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.632084 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db54cce9-ff9d-4772-abf3-01f15ecb8075-config\") pod \"db54cce9-ff9d-4772-abf3-01f15ecb8075\" (UID: \"db54cce9-ff9d-4772-abf3-01f15ecb8075\") " Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.632107 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/db54cce9-ff9d-4772-abf3-01f15ecb8075-proxy-ca-bundles\") pod \"db54cce9-ff9d-4772-abf3-01f15ecb8075\" (UID: \"db54cce9-ff9d-4772-abf3-01f15ecb8075\") " Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.632120 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fss4g\" (UniqueName: \"kubernetes.io/projected/92924f81-e588-47e1-84d1-766c9774f6d1-kube-api-access-fss4g\") pod \"92924f81-e588-47e1-84d1-766c9774f6d1\" (UID: \"92924f81-e588-47e1-84d1-766c9774f6d1\") " Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.632135 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/db54cce9-ff9d-4772-abf3-01f15ecb8075-client-ca\") pod \"db54cce9-ff9d-4772-abf3-01f15ecb8075\" (UID: \"db54cce9-ff9d-4772-abf3-01f15ecb8075\") " Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.632162 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/db54cce9-ff9d-4772-abf3-01f15ecb8075-serving-cert\") pod \"db54cce9-ff9d-4772-abf3-01f15ecb8075\" (UID: \"db54cce9-ff9d-4772-abf3-01f15ecb8075\") " Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.632246 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7c0d0e39-41fa-479b-b19f-9897ffd25de8-client-ca\") pod 
\"controller-manager-845fd9f844-btlt9\" (UID: \"7c0d0e39-41fa-479b-b19f-9897ffd25de8\") " pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.632272 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lf7ks\" (UniqueName: \"kubernetes.io/projected/7c0d0e39-41fa-479b-b19f-9897ffd25de8-kube-api-access-lf7ks\") pod \"controller-manager-845fd9f844-btlt9\" (UID: \"7c0d0e39-41fa-479b-b19f-9897ffd25de8\") " pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.632293 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7c0d0e39-41fa-479b-b19f-9897ffd25de8-proxy-ca-bundles\") pod \"controller-manager-845fd9f844-btlt9\" (UID: \"7c0d0e39-41fa-479b-b19f-9897ffd25de8\") " pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.632314 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c0d0e39-41fa-479b-b19f-9897ffd25de8-serving-cert\") pod \"controller-manager-845fd9f844-btlt9\" (UID: \"7c0d0e39-41fa-479b-b19f-9897ffd25de8\") " pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.632349 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c0d0e39-41fa-479b-b19f-9897ffd25de8-config\") pod \"controller-manager-845fd9f844-btlt9\" (UID: \"7c0d0e39-41fa-479b-b19f-9897ffd25de8\") " pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.633154 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92924f81-e588-47e1-84d1-766c9774f6d1-client-ca" (OuterVolumeSpecName: "client-ca") pod "92924f81-e588-47e1-84d1-766c9774f6d1" (UID: "92924f81-e588-47e1-84d1-766c9774f6d1"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.633464 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92924f81-e588-47e1-84d1-766c9774f6d1-config" (OuterVolumeSpecName: "config") pod "92924f81-e588-47e1-84d1-766c9774f6d1" (UID: "92924f81-e588-47e1-84d1-766c9774f6d1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.633951 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db54cce9-ff9d-4772-abf3-01f15ecb8075-client-ca" (OuterVolumeSpecName: "client-ca") pod "db54cce9-ff9d-4772-abf3-01f15ecb8075" (UID: "db54cce9-ff9d-4772-abf3-01f15ecb8075"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.633991 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db54cce9-ff9d-4772-abf3-01f15ecb8075-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "db54cce9-ff9d-4772-abf3-01f15ecb8075" (UID: "db54cce9-ff9d-4772-abf3-01f15ecb8075"). 
InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.634069 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db54cce9-ff9d-4772-abf3-01f15ecb8075-config" (OuterVolumeSpecName: "config") pod "db54cce9-ff9d-4772-abf3-01f15ecb8075" (UID: "db54cce9-ff9d-4772-abf3-01f15ecb8075"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.638857 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db54cce9-ff9d-4772-abf3-01f15ecb8075-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "db54cce9-ff9d-4772-abf3-01f15ecb8075" (UID: "db54cce9-ff9d-4772-abf3-01f15ecb8075"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.643084 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92924f81-e588-47e1-84d1-766c9774f6d1-kube-api-access-fss4g" (OuterVolumeSpecName: "kube-api-access-fss4g") pod "92924f81-e588-47e1-84d1-766c9774f6d1" (UID: "92924f81-e588-47e1-84d1-766c9774f6d1"). InnerVolumeSpecName "kube-api-access-fss4g". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.644065 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db54cce9-ff9d-4772-abf3-01f15ecb8075-kube-api-access-ssfzm" (OuterVolumeSpecName: "kube-api-access-ssfzm") pod "db54cce9-ff9d-4772-abf3-01f15ecb8075" (UID: "db54cce9-ff9d-4772-abf3-01f15ecb8075"). InnerVolumeSpecName "kube-api-access-ssfzm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.649815 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92924f81-e588-47e1-84d1-766c9774f6d1-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "92924f81-e588-47e1-84d1-766c9774f6d1" (UID: "92924f81-e588-47e1-84d1-766c9774f6d1"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.733167 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7c0d0e39-41fa-479b-b19f-9897ffd25de8-client-ca\") pod \"controller-manager-845fd9f844-btlt9\" (UID: \"7c0d0e39-41fa-479b-b19f-9897ffd25de8\") " pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.733222 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lf7ks\" (UniqueName: \"kubernetes.io/projected/7c0d0e39-41fa-479b-b19f-9897ffd25de8-kube-api-access-lf7ks\") pod \"controller-manager-845fd9f844-btlt9\" (UID: \"7c0d0e39-41fa-479b-b19f-9897ffd25de8\") " pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.733249 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7c0d0e39-41fa-479b-b19f-9897ffd25de8-proxy-ca-bundles\") pod \"controller-manager-845fd9f844-btlt9\" (UID: \"7c0d0e39-41fa-479b-b19f-9897ffd25de8\") " pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.733271 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c0d0e39-41fa-479b-b19f-9897ffd25de8-serving-cert\") pod \"controller-manager-845fd9f844-btlt9\" (UID: \"7c0d0e39-41fa-479b-b19f-9897ffd25de8\") " pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.733314 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c0d0e39-41fa-479b-b19f-9897ffd25de8-config\") pod \"controller-manager-845fd9f844-btlt9\" (UID: \"7c0d0e39-41fa-479b-b19f-9897ffd25de8\") " pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.733376 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92924f81-e588-47e1-84d1-766c9774f6d1-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.733388 4838 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/92924f81-e588-47e1-84d1-766c9774f6d1-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.733398 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ssfzm\" (UniqueName: \"kubernetes.io/projected/db54cce9-ff9d-4772-abf3-01f15ecb8075-kube-api-access-ssfzm\") on node \"crc\" DevicePath \"\"" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.733408 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db54cce9-ff9d-4772-abf3-01f15ecb8075-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.733418 4838 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/db54cce9-ff9d-4772-abf3-01f15ecb8075-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.733427 4838 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fss4g\" (UniqueName: \"kubernetes.io/projected/92924f81-e588-47e1-84d1-766c9774f6d1-kube-api-access-fss4g\") on node \"crc\" DevicePath \"\"" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.733439 4838 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/db54cce9-ff9d-4772-abf3-01f15ecb8075-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.733451 4838 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/db54cce9-ff9d-4772-abf3-01f15ecb8075-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.733462 4838 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/92924f81-e588-47e1-84d1-766c9774f6d1-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.734398 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7c0d0e39-41fa-479b-b19f-9897ffd25de8-client-ca\") pod \"controller-manager-845fd9f844-btlt9\" (UID: \"7c0d0e39-41fa-479b-b19f-9897ffd25de8\") " pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.734692 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c0d0e39-41fa-479b-b19f-9897ffd25de8-config\") pod \"controller-manager-845fd9f844-btlt9\" (UID: \"7c0d0e39-41fa-479b-b19f-9897ffd25de8\") " pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.734896 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7c0d0e39-41fa-479b-b19f-9897ffd25de8-proxy-ca-bundles\") pod \"controller-manager-845fd9f844-btlt9\" (UID: \"7c0d0e39-41fa-479b-b19f-9897ffd25de8\") " pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.737776 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c0d0e39-41fa-479b-b19f-9897ffd25de8-serving-cert\") pod \"controller-manager-845fd9f844-btlt9\" (UID: \"7c0d0e39-41fa-479b-b19f-9897ffd25de8\") " pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.747572 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lf7ks\" (UniqueName: \"kubernetes.io/projected/7c0d0e39-41fa-479b-b19f-9897ffd25de8-kube-api-access-lf7ks\") pod \"controller-manager-845fd9f844-btlt9\" (UID: \"7c0d0e39-41fa-479b-b19f-9897ffd25de8\") " pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" Feb 02 10:56:44 crc kubenswrapper[4838]: E0202 10:56:44.814533 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Feb 02 10:56:44 crc kubenswrapper[4838]: E0202 10:56:44.814727 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wwqfm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-qwkhj_openshift-marketplace(f98a7f3b-5730-4469-aef3-188a9755f566): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 10:56:44 crc kubenswrapper[4838]: E0202 10:56:44.815913 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-qwkhj" podUID="f98a7f3b-5730-4469-aef3-188a9755f566" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.828920 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.891968 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"c5a4810d-7433-4fe3-a231-a9ba2a238ce1","Type":"ContainerStarted","Data":"d339bc6660ebedb96049cfc928bcfe191d833f79d0392d084183d09a884779e3"} Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.899560 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.899555 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-w48jn" event={"ID":"db54cce9-ff9d-4772-abf3-01f15ecb8075","Type":"ContainerDied","Data":"2a9efacab3eb58e6c1129fd92b58c41f9202f07dcb955d4e14bf300156f4bb3f"} Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.899679 4838 scope.go:117] "RemoveContainer" containerID="37b6c8c5c9804f2ddcfcf2a242353706afa270f96a916bdf65e4253961bc63cb" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.915805 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.919501 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.921395 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.921389 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj" event={"ID":"92924f81-e588-47e1-84d1-766c9774f6d1","Type":"ContainerDied","Data":"d313cf6afd43dd1de7c3c3dfc38b0bb2788b5f5836deb354943afe4e8538d1de"} Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.959422 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-kdnnp"] Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.965912 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.969051 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj"] Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.973290 4838 scope.go:117] "RemoveContainer" containerID="ff4739a8a737120517fdd5fdfee34e7b4f5b672dd3fa24c74ac417bdb3ea9476" Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.973416 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-z6khj"] Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.976883 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-w48jn"] Feb 02 10:56:44 crc kubenswrapper[4838]: W0202 10:56:44.977120 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc5fedaf5_8b51_4d67_b0b2_1709adf5e2ba.slice/crio-2cd458015be685958a02d8b9cff07936b3b6946653ef24cef5413a647d6166a5 WatchSource:0}: Error finding container 2cd458015be685958a02d8b9cff07936b3b6946653ef24cef5413a647d6166a5: Status 404 returned error can't find the container with id 2cd458015be685958a02d8b9cff07936b3b6946653ef24cef5413a647d6166a5 Feb 02 10:56:44 crc kubenswrapper[4838]: E0202 10:56:44.977714 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" 
pod="openshift-marketplace/redhat-operators-qwkhj" podUID="f98a7f3b-5730-4469-aef3-188a9755f566" Feb 02 10:56:44 crc kubenswrapper[4838]: W0202 10:56:44.978275 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod2a16c769_012d_4a30_b9db_5629ed018ef8.slice/crio-517725d0e05c24deecf3233357cc05eb6511e22587a9417728dceec73851bacf WatchSource:0}: Error finding container 517725d0e05c24deecf3233357cc05eb6511e22587a9417728dceec73851bacf: Status 404 returned error can't find the container with id 517725d0e05c24deecf3233357cc05eb6511e22587a9417728dceec73851bacf Feb 02 10:56:44 crc kubenswrapper[4838]: I0202 10:56:44.980221 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-w48jn"] Feb 02 10:56:45 crc kubenswrapper[4838]: I0202 10:56:45.158404 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-845fd9f844-btlt9"] Feb 02 10:56:45 crc kubenswrapper[4838]: W0202 10:56:45.174935 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7c0d0e39_41fa_479b_b19f_9897ffd25de8.slice/crio-ed0461f1c5df2da4cf5fa77027ea9a5b75521d3a7572b79afad6a338a495b309 WatchSource:0}: Error finding container ed0461f1c5df2da4cf5fa77027ea9a5b75521d3a7572b79afad6a338a495b309: Status 404 returned error can't find the container with id ed0461f1c5df2da4cf5fa77027ea9a5b75521d3a7572b79afad6a338a495b309 Feb 02 10:56:45 crc kubenswrapper[4838]: E0202 10:56:45.213012 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Feb 02 10:56:45 crc kubenswrapper[4838]: E0202 10:56:45.213298 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-487wm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
redhat-operators-pl5pl_openshift-marketplace(193925e0-1419-444b-9e75-ed7371081181): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 10:56:45 crc kubenswrapper[4838]: E0202 10:56:45.214676 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-pl5pl" podUID="193925e0-1419-444b-9e75-ed7371081181" Feb 02 10:56:45 crc kubenswrapper[4838]: E0202 10:56:45.248164 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Feb 02 10:56:45 crc kubenswrapper[4838]: E0202 10:56:45.248346 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5gk2l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-tnscg_openshift-marketplace(bf0d00a9-64a4-46b9-9bc7-8617b0f3692a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 10:56:45 crc kubenswrapper[4838]: E0202 10:56:45.250347 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-tnscg" podUID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" Feb 02 10:56:45 crc kubenswrapper[4838]: E0202 10:56:45.367409 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" 
image="registry.redhat.io/redhat/certified-operator-index:v4.18" Feb 02 10:56:45 crc kubenswrapper[4838]: E0202 10:56:45.368885 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-l8wwd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-rbh6x_openshift-marketplace(21ad3b27-d69d-4db6-a1c6-fac312ad582d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 10:56:45 crc kubenswrapper[4838]: E0202 10:56:45.370079 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-rbh6x" podUID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" Feb 02 10:56:45 crc kubenswrapper[4838]: I0202 10:56:45.430249 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 10:56:45 crc kubenswrapper[4838]: I0202 10:56:45.430312 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 10:56:45 crc kubenswrapper[4838]: I0202 10:56:45.933427 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-kdnnp" event={"ID":"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba","Type":"ContainerStarted","Data":"eaa11087f4e31c5c55f39cc8e1f679c215214517cdb0b7124efdb84353b464ed"} Feb 02 10:56:45 crc kubenswrapper[4838]: I0202 10:56:45.933517 4838 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-kdnnp" event={"ID":"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba","Type":"ContainerStarted","Data":"4810d10a408db3faf4d9f773a4bc96976fbb2b3f48465e06ade4e4f9f5ef51b0"} Feb 02 10:56:45 crc kubenswrapper[4838]: I0202 10:56:45.933533 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-kdnnp" event={"ID":"c5fedaf5-8b51-4d67-b0b2-1709adf5e2ba","Type":"ContainerStarted","Data":"2cd458015be685958a02d8b9cff07936b3b6946653ef24cef5413a647d6166a5"} Feb 02 10:56:45 crc kubenswrapper[4838]: E0202 10:56:45.933707 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Feb 02 10:56:45 crc kubenswrapper[4838]: E0202 10:56:45.933860 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8n2xt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-g2rlc_openshift-marketplace(da4cb631-2c1a-4acd-8e10-cf1fbdb099e1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 10:56:45 crc kubenswrapper[4838]: E0202 10:56:45.934983 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-g2rlc" podUID="da4cb631-2c1a-4acd-8e10-cf1fbdb099e1" Feb 02 10:56:45 crc kubenswrapper[4838]: I0202 10:56:45.935994 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"86017b1b-8411-4ee3-bffd-d71c26b535fb","Type":"ContainerStarted","Data":"da7210448a7926da633ab6d2ab513fac696aa8c5568ee670eef57c9f1e65d40a"} Feb 02 10:56:45 crc kubenswrapper[4838]: 
I0202 10:56:45.936046 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"86017b1b-8411-4ee3-bffd-d71c26b535fb","Type":"ContainerStarted","Data":"3c1974631ad32de29b6c742a74a8110fea0c40f6b660a3c52562d8349c3609f4"} Feb 02 10:56:45 crc kubenswrapper[4838]: I0202 10:56:45.940951 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"c5a4810d-7433-4fe3-a231-a9ba2a238ce1","Type":"ContainerStarted","Data":"7c983bd01067393bb40f1236a00b9c5df0a5ccbe12bb81f94966b760b3e11239"} Feb 02 10:56:45 crc kubenswrapper[4838]: I0202 10:56:45.946026 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" event={"ID":"7c0d0e39-41fa-479b-b19f-9897ffd25de8","Type":"ContainerStarted","Data":"cd0b9f5de765ae91c1bef48b177cb4af2d190d30905ebfef0f72f4786acbf4be"} Feb 02 10:56:45 crc kubenswrapper[4838]: I0202 10:56:45.946075 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" event={"ID":"7c0d0e39-41fa-479b-b19f-9897ffd25de8","Type":"ContainerStarted","Data":"ed0461f1c5df2da4cf5fa77027ea9a5b75521d3a7572b79afad6a338a495b309"} Feb 02 10:56:45 crc kubenswrapper[4838]: I0202 10:56:45.946255 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" Feb 02 10:56:45 crc kubenswrapper[4838]: I0202 10:56:45.971394 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" Feb 02 10:56:45 crc kubenswrapper[4838]: I0202 10:56:45.975291 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"2a16c769-012d-4a30-b9db-5629ed018ef8","Type":"ContainerStarted","Data":"7a4ab72405ec81f4fdbbcd867ddfdab8f8f3e8d9a8f3e00172852c3acee8c747"} Feb 02 10:56:45 crc kubenswrapper[4838]: I0202 10:56:45.975370 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"2a16c769-012d-4a30-b9db-5629ed018ef8","Type":"ContainerStarted","Data":"517725d0e05c24deecf3233357cc05eb6511e22587a9417728dceec73851bacf"} Feb 02 10:56:45 crc kubenswrapper[4838]: E0202 10:56:45.977890 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-tnscg" podUID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" Feb 02 10:56:45 crc kubenswrapper[4838]: E0202 10:56:45.977955 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-pl5pl" podUID="193925e0-1419-444b-9e75-ed7371081181" Feb 02 10:56:45 crc kubenswrapper[4838]: E0202 10:56:45.977993 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-rbh6x" podUID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" Feb 02 10:56:45 crc kubenswrapper[4838]: I0202 
10:56:45.995993 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-kdnnp" podStartSLOduration=175.995969495 podStartE2EDuration="2m55.995969495s" podCreationTimestamp="2026-02-02 10:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:56:45.955259314 +0000 UTC m=+200.292360382" watchObservedRunningTime="2026-02-02 10:56:45.995969495 +0000 UTC m=+200.333070533" Feb 02 10:56:45 crc kubenswrapper[4838]: I0202 10:56:45.997260 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" podStartSLOduration=12.99724728 podStartE2EDuration="12.99724728s" podCreationTimestamp="2026-02-02 10:56:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:56:45.991947482 +0000 UTC m=+200.329048540" watchObservedRunningTime="2026-02-02 10:56:45.99724728 +0000 UTC m=+200.334348348" Feb 02 10:56:46 crc kubenswrapper[4838]: I0202 10:56:46.040534 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=11.040515043 podStartE2EDuration="11.040515043s" podCreationTimestamp="2026-02-02 10:56:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:56:46.019287448 +0000 UTC m=+200.356388476" watchObservedRunningTime="2026-02-02 10:56:46.040515043 +0000 UTC m=+200.377616061" Feb 02 10:56:46 crc kubenswrapper[4838]: I0202 10:56:46.089682 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=45.08966632 podStartE2EDuration="45.08966632s" podCreationTimestamp="2026-02-02 10:56:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:56:46.05505867 +0000 UTC m=+200.392159728" watchObservedRunningTime="2026-02-02 10:56:46.08966632 +0000 UTC m=+200.426767348" Feb 02 10:56:46 crc kubenswrapper[4838]: I0202 10:56:46.103262 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=5.103244491 podStartE2EDuration="5.103244491s" podCreationTimestamp="2026-02-02 10:56:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:56:46.102609413 +0000 UTC m=+200.439710451" watchObservedRunningTime="2026-02-02 10:56:46.103244491 +0000 UTC m=+200.440345519" Feb 02 10:56:46 crc kubenswrapper[4838]: I0202 10:56:46.517553 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92924f81-e588-47e1-84d1-766c9774f6d1" path="/var/lib/kubelet/pods/92924f81-e588-47e1-84d1-766c9774f6d1/volumes" Feb 02 10:56:46 crc kubenswrapper[4838]: I0202 10:56:46.519071 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db54cce9-ff9d-4772-abf3-01f15ecb8075" path="/var/lib/kubelet/pods/db54cce9-ff9d-4772-abf3-01f15ecb8075/volumes" Feb 02 10:56:46 crc kubenswrapper[4838]: I0202 10:56:46.983594 4838 generic.go:334] "Generic (PLEG): container finished" podID="86017b1b-8411-4ee3-bffd-d71c26b535fb" 
containerID="da7210448a7926da633ab6d2ab513fac696aa8c5568ee670eef57c9f1e65d40a" exitCode=0 Feb 02 10:56:46 crc kubenswrapper[4838]: I0202 10:56:46.983715 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"86017b1b-8411-4ee3-bffd-d71c26b535fb","Type":"ContainerDied","Data":"da7210448a7926da633ab6d2ab513fac696aa8c5568ee670eef57c9f1e65d40a"} Feb 02 10:56:46 crc kubenswrapper[4838]: I0202 10:56:46.986147 4838 generic.go:334] "Generic (PLEG): container finished" podID="c5a4810d-7433-4fe3-a231-a9ba2a238ce1" containerID="7c983bd01067393bb40f1236a00b9c5df0a5ccbe12bb81f94966b760b3e11239" exitCode=0 Feb 02 10:56:46 crc kubenswrapper[4838]: I0202 10:56:46.986290 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"c5a4810d-7433-4fe3-a231-a9ba2a238ce1","Type":"ContainerDied","Data":"7c983bd01067393bb40f1236a00b9c5df0a5ccbe12bb81f94966b760b3e11239"} Feb 02 10:56:46 crc kubenswrapper[4838]: E0202 10:56:46.988969 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-g2rlc" podUID="da4cb631-2c1a-4acd-8e10-cf1fbdb099e1" Feb 02 10:56:47 crc kubenswrapper[4838]: I0202 10:56:47.154979 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs"] Feb 02 10:56:47 crc kubenswrapper[4838]: I0202 10:56:47.156060 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs" Feb 02 10:56:47 crc kubenswrapper[4838]: I0202 10:56:47.158612 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 02 10:56:47 crc kubenswrapper[4838]: I0202 10:56:47.158612 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 02 10:56:47 crc kubenswrapper[4838]: I0202 10:56:47.158710 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 02 10:56:47 crc kubenswrapper[4838]: I0202 10:56:47.158760 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 02 10:56:47 crc kubenswrapper[4838]: I0202 10:56:47.159114 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 02 10:56:47 crc kubenswrapper[4838]: I0202 10:56:47.159348 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 02 10:56:47 crc kubenswrapper[4838]: I0202 10:56:47.170578 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs"] Feb 02 10:56:47 crc kubenswrapper[4838]: I0202 10:56:47.305873 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19a63aed-487b-4667-8df9-7a3327b27c8f-serving-cert\") pod \"route-controller-manager-7cb8f75548-7jljs\" (UID: \"19a63aed-487b-4667-8df9-7a3327b27c8f\") " 
pod="openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs" Feb 02 10:56:47 crc kubenswrapper[4838]: I0202 10:56:47.306119 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/19a63aed-487b-4667-8df9-7a3327b27c8f-client-ca\") pod \"route-controller-manager-7cb8f75548-7jljs\" (UID: \"19a63aed-487b-4667-8df9-7a3327b27c8f\") " pod="openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs" Feb 02 10:56:47 crc kubenswrapper[4838]: I0202 10:56:47.306262 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19a63aed-487b-4667-8df9-7a3327b27c8f-config\") pod \"route-controller-manager-7cb8f75548-7jljs\" (UID: \"19a63aed-487b-4667-8df9-7a3327b27c8f\") " pod="openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs" Feb 02 10:56:47 crc kubenswrapper[4838]: I0202 10:56:47.306320 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4b7qb\" (UniqueName: \"kubernetes.io/projected/19a63aed-487b-4667-8df9-7a3327b27c8f-kube-api-access-4b7qb\") pod \"route-controller-manager-7cb8f75548-7jljs\" (UID: \"19a63aed-487b-4667-8df9-7a3327b27c8f\") " pod="openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs" Feb 02 10:56:47 crc kubenswrapper[4838]: I0202 10:56:47.407809 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/19a63aed-487b-4667-8df9-7a3327b27c8f-client-ca\") pod \"route-controller-manager-7cb8f75548-7jljs\" (UID: \"19a63aed-487b-4667-8df9-7a3327b27c8f\") " pod="openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs" Feb 02 10:56:47 crc kubenswrapper[4838]: I0202 10:56:47.407950 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19a63aed-487b-4667-8df9-7a3327b27c8f-config\") pod \"route-controller-manager-7cb8f75548-7jljs\" (UID: \"19a63aed-487b-4667-8df9-7a3327b27c8f\") " pod="openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs" Feb 02 10:56:47 crc kubenswrapper[4838]: I0202 10:56:47.408027 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4b7qb\" (UniqueName: \"kubernetes.io/projected/19a63aed-487b-4667-8df9-7a3327b27c8f-kube-api-access-4b7qb\") pod \"route-controller-manager-7cb8f75548-7jljs\" (UID: \"19a63aed-487b-4667-8df9-7a3327b27c8f\") " pod="openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs" Feb 02 10:56:47 crc kubenswrapper[4838]: I0202 10:56:47.408077 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19a63aed-487b-4667-8df9-7a3327b27c8f-serving-cert\") pod \"route-controller-manager-7cb8f75548-7jljs\" (UID: \"19a63aed-487b-4667-8df9-7a3327b27c8f\") " pod="openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs" Feb 02 10:56:47 crc kubenswrapper[4838]: I0202 10:56:47.408734 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/19a63aed-487b-4667-8df9-7a3327b27c8f-client-ca\") pod \"route-controller-manager-7cb8f75548-7jljs\" (UID: \"19a63aed-487b-4667-8df9-7a3327b27c8f\") " 
pod="openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs" Feb 02 10:56:47 crc kubenswrapper[4838]: I0202 10:56:47.410130 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19a63aed-487b-4667-8df9-7a3327b27c8f-config\") pod \"route-controller-manager-7cb8f75548-7jljs\" (UID: \"19a63aed-487b-4667-8df9-7a3327b27c8f\") " pod="openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs" Feb 02 10:56:47 crc kubenswrapper[4838]: I0202 10:56:47.422075 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19a63aed-487b-4667-8df9-7a3327b27c8f-serving-cert\") pod \"route-controller-manager-7cb8f75548-7jljs\" (UID: \"19a63aed-487b-4667-8df9-7a3327b27c8f\") " pod="openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs" Feb 02 10:56:47 crc kubenswrapper[4838]: I0202 10:56:47.439235 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4b7qb\" (UniqueName: \"kubernetes.io/projected/19a63aed-487b-4667-8df9-7a3327b27c8f-kube-api-access-4b7qb\") pod \"route-controller-manager-7cb8f75548-7jljs\" (UID: \"19a63aed-487b-4667-8df9-7a3327b27c8f\") " pod="openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs" Feb 02 10:56:47 crc kubenswrapper[4838]: I0202 10:56:47.482081 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs" Feb 02 10:56:47 crc kubenswrapper[4838]: I0202 10:56:47.738986 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs"] Feb 02 10:56:47 crc kubenswrapper[4838]: W0202 10:56:47.744565 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19a63aed_487b_4667_8df9_7a3327b27c8f.slice/crio-8102ae2c308aa39008cff126706e9498d589b6831c663a74a118616e61310c81 WatchSource:0}: Error finding container 8102ae2c308aa39008cff126706e9498d589b6831c663a74a118616e61310c81: Status 404 returned error can't find the container with id 8102ae2c308aa39008cff126706e9498d589b6831c663a74a118616e61310c81 Feb 02 10:56:47 crc kubenswrapper[4838]: I0202 10:56:47.994213 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs" event={"ID":"19a63aed-487b-4667-8df9-7a3327b27c8f","Type":"ContainerStarted","Data":"8102ae2c308aa39008cff126706e9498d589b6831c663a74a118616e61310c81"} Feb 02 10:56:48 crc kubenswrapper[4838]: I0202 10:56:48.282848 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 10:56:48 crc kubenswrapper[4838]: I0202 10:56:48.323071 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c5a4810d-7433-4fe3-a231-a9ba2a238ce1-kubelet-dir\") pod \"c5a4810d-7433-4fe3-a231-a9ba2a238ce1\" (UID: \"c5a4810d-7433-4fe3-a231-a9ba2a238ce1\") " Feb 02 10:56:48 crc kubenswrapper[4838]: I0202 10:56:48.323402 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c5a4810d-7433-4fe3-a231-a9ba2a238ce1-kube-api-access\") pod \"c5a4810d-7433-4fe3-a231-a9ba2a238ce1\" (UID: \"c5a4810d-7433-4fe3-a231-a9ba2a238ce1\") " Feb 02 10:56:48 crc kubenswrapper[4838]: I0202 10:56:48.323185 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c5a4810d-7433-4fe3-a231-a9ba2a238ce1-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "c5a4810d-7433-4fe3-a231-a9ba2a238ce1" (UID: "c5a4810d-7433-4fe3-a231-a9ba2a238ce1"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 10:56:48 crc kubenswrapper[4838]: I0202 10:56:48.323827 4838 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c5a4810d-7433-4fe3-a231-a9ba2a238ce1-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 02 10:56:48 crc kubenswrapper[4838]: I0202 10:56:48.328196 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5a4810d-7433-4fe3-a231-a9ba2a238ce1-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "c5a4810d-7433-4fe3-a231-a9ba2a238ce1" (UID: "c5a4810d-7433-4fe3-a231-a9ba2a238ce1"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:56:48 crc kubenswrapper[4838]: I0202 10:56:48.344781 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 10:56:48 crc kubenswrapper[4838]: I0202 10:56:48.424058 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/86017b1b-8411-4ee3-bffd-d71c26b535fb-kubelet-dir\") pod \"86017b1b-8411-4ee3-bffd-d71c26b535fb\" (UID: \"86017b1b-8411-4ee3-bffd-d71c26b535fb\") " Feb 02 10:56:48 crc kubenswrapper[4838]: I0202 10:56:48.424144 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/86017b1b-8411-4ee3-bffd-d71c26b535fb-kube-api-access\") pod \"86017b1b-8411-4ee3-bffd-d71c26b535fb\" (UID: \"86017b1b-8411-4ee3-bffd-d71c26b535fb\") " Feb 02 10:56:48 crc kubenswrapper[4838]: I0202 10:56:48.424231 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/86017b1b-8411-4ee3-bffd-d71c26b535fb-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "86017b1b-8411-4ee3-bffd-d71c26b535fb" (UID: "86017b1b-8411-4ee3-bffd-d71c26b535fb"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 10:56:48 crc kubenswrapper[4838]: I0202 10:56:48.424293 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c5a4810d-7433-4fe3-a231-a9ba2a238ce1-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 10:56:48 crc kubenswrapper[4838]: I0202 10:56:48.428291 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86017b1b-8411-4ee3-bffd-d71c26b535fb-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "86017b1b-8411-4ee3-bffd-d71c26b535fb" (UID: "86017b1b-8411-4ee3-bffd-d71c26b535fb"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:56:48 crc kubenswrapper[4838]: I0202 10:56:48.525704 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/86017b1b-8411-4ee3-bffd-d71c26b535fb-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 10:56:48 crc kubenswrapper[4838]: I0202 10:56:48.525747 4838 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/86017b1b-8411-4ee3-bffd-d71c26b535fb-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 02 10:56:49 crc kubenswrapper[4838]: I0202 10:56:49.004971 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 10:56:49 crc kubenswrapper[4838]: I0202 10:56:49.004987 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"86017b1b-8411-4ee3-bffd-d71c26b535fb","Type":"ContainerDied","Data":"3c1974631ad32de29b6c742a74a8110fea0c40f6b660a3c52562d8349c3609f4"} Feb 02 10:56:49 crc kubenswrapper[4838]: I0202 10:56:49.005032 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3c1974631ad32de29b6c742a74a8110fea0c40f6b660a3c52562d8349c3609f4" Feb 02 10:56:49 crc kubenswrapper[4838]: I0202 10:56:49.010748 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"c5a4810d-7433-4fe3-a231-a9ba2a238ce1","Type":"ContainerDied","Data":"d339bc6660ebedb96049cfc928bcfe191d833f79d0392d084183d09a884779e3"} Feb 02 10:56:49 crc kubenswrapper[4838]: I0202 10:56:49.011027 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d339bc6660ebedb96049cfc928bcfe191d833f79d0392d084183d09a884779e3" Feb 02 10:56:49 crc kubenswrapper[4838]: I0202 10:56:49.011103 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 10:56:49 crc kubenswrapper[4838]: I0202 10:56:49.022868 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs" event={"ID":"19a63aed-487b-4667-8df9-7a3327b27c8f","Type":"ContainerStarted","Data":"49766b85beb6c601de9f86bae52f44bd5d16956c8dc977a77f4fa1701b359ee3"} Feb 02 10:56:49 crc kubenswrapper[4838]: I0202 10:56:49.024003 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs" Feb 02 10:56:49 crc kubenswrapper[4838]: I0202 10:56:49.030353 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs" Feb 02 10:56:49 crc kubenswrapper[4838]: I0202 10:56:49.048980 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs" podStartSLOduration=16.048966478 podStartE2EDuration="16.048966478s" podCreationTimestamp="2026-02-02 10:56:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:56:49.045569443 +0000 UTC m=+203.382670511" watchObservedRunningTime="2026-02-02 10:56:49.048966478 +0000 UTC m=+203.386067516" Feb 02 10:56:50 crc kubenswrapper[4838]: I0202 10:56:50.031165 4838 generic.go:334] "Generic (PLEG): container finished" podID="cdb7ade3-e1b6-436b-a5df-3abb972b72fa" containerID="7e87aa44dc34b117da005ccbe406770c8e9232b16085f5cd591430268174549b" exitCode=0 Feb 02 10:56:50 crc kubenswrapper[4838]: I0202 10:56:50.031255 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4f9j" event={"ID":"cdb7ade3-e1b6-436b-a5df-3abb972b72fa","Type":"ContainerDied","Data":"7e87aa44dc34b117da005ccbe406770c8e9232b16085f5cd591430268174549b"} Feb 02 10:56:51 crc kubenswrapper[4838]: I0202 10:56:51.038305 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4f9j" event={"ID":"cdb7ade3-e1b6-436b-a5df-3abb972b72fa","Type":"ContainerStarted","Data":"087fedd9713829e266f735e578f7a5c5329112bf55b6288ef5410d669a789c2c"} Feb 02 10:56:51 crc kubenswrapper[4838]: I0202 10:56:51.058795 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-m4f9j" podStartSLOduration=3.997180691 podStartE2EDuration="57.058776889s" podCreationTimestamp="2026-02-02 10:55:54 +0000 UTC" firstStartedPulling="2026-02-02 10:55:57.471990963 +0000 UTC m=+151.809091991" lastFinishedPulling="2026-02-02 10:56:50.533587161 +0000 UTC m=+204.870688189" observedRunningTime="2026-02-02 10:56:51.056033992 +0000 UTC m=+205.393135020" watchObservedRunningTime="2026-02-02 10:56:51.058776889 +0000 UTC m=+205.395877937" Feb 02 10:56:53 crc kubenswrapper[4838]: I0202 10:56:53.053066 4838 generic.go:334] "Generic (PLEG): container finished" podID="d7b6d22c-5441-4f5c-830b-17d67446352d" containerID="67c288d5fe396bb70a6fc6d7ff09a745c878afe646b0cf0f55d69404465cac8e" exitCode=0 Feb 02 10:56:53 crc kubenswrapper[4838]: I0202 10:56:53.053137 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ntc2n" 
event={"ID":"d7b6d22c-5441-4f5c-830b-17d67446352d","Type":"ContainerDied","Data":"67c288d5fe396bb70a6fc6d7ff09a745c878afe646b0cf0f55d69404465cac8e"} Feb 02 10:56:54 crc kubenswrapper[4838]: I0202 10:56:54.061397 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ntc2n" event={"ID":"d7b6d22c-5441-4f5c-830b-17d67446352d","Type":"ContainerStarted","Data":"eab2da227dc61318787dee5786dede55b7ba45d35e1fa080d6ebb36b8f39b2f3"} Feb 02 10:56:54 crc kubenswrapper[4838]: I0202 10:56:54.083318 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ntc2n" podStartSLOduration=4.036256765 podStartE2EDuration="1m1.083294874s" podCreationTimestamp="2026-02-02 10:55:53 +0000 UTC" firstStartedPulling="2026-02-02 10:55:56.417163346 +0000 UTC m=+150.754264384" lastFinishedPulling="2026-02-02 10:56:53.464201465 +0000 UTC m=+207.801302493" observedRunningTime="2026-02-02 10:56:54.082043309 +0000 UTC m=+208.419144367" watchObservedRunningTime="2026-02-02 10:56:54.083294874 +0000 UTC m=+208.420395932" Feb 02 10:56:54 crc kubenswrapper[4838]: I0202 10:56:54.960301 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ntc2n" Feb 02 10:56:54 crc kubenswrapper[4838]: I0202 10:56:54.960372 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ntc2n" Feb 02 10:56:55 crc kubenswrapper[4838]: I0202 10:56:55.466713 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-m4f9j" Feb 02 10:56:55 crc kubenswrapper[4838]: I0202 10:56:55.467118 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-m4f9j" Feb 02 10:56:55 crc kubenswrapper[4838]: I0202 10:56:55.541961 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-m4f9j" Feb 02 10:56:56 crc kubenswrapper[4838]: I0202 10:56:56.122825 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-m4f9j" Feb 02 10:56:56 crc kubenswrapper[4838]: I0202 10:56:56.233234 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-ntc2n" podUID="d7b6d22c-5441-4f5c-830b-17d67446352d" containerName="registry-server" probeResult="failure" output=< Feb 02 10:56:56 crc kubenswrapper[4838]: timeout: failed to connect service ":50051" within 1s Feb 02 10:56:56 crc kubenswrapper[4838]: > Feb 02 10:56:57 crc kubenswrapper[4838]: I0202 10:56:57.079945 4838 generic.go:334] "Generic (PLEG): container finished" podID="429f9b8c-ff40-4398-811b-01c8702b65b1" containerID="ffea144da496d452edd1e73399d93456304604e5349c5eff53ae950e478716b1" exitCode=0 Feb 02 10:56:57 crc kubenswrapper[4838]: I0202 10:56:57.080057 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqklr" event={"ID":"429f9b8c-ff40-4398-811b-01c8702b65b1","Type":"ContainerDied","Data":"ffea144da496d452edd1e73399d93456304604e5349c5eff53ae950e478716b1"} Feb 02 10:56:58 crc kubenswrapper[4838]: I0202 10:56:58.105439 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqklr" 
event={"ID":"429f9b8c-ff40-4398-811b-01c8702b65b1","Type":"ContainerStarted","Data":"76fa6a90ee4fa2bf406a2b7af40260b5e1d58c811e1183e422bb4cf2e9ee42b9"} Feb 02 10:56:58 crc kubenswrapper[4838]: I0202 10:56:58.123180 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nqklr" podStartSLOduration=1.972968921 podStartE2EDuration="1m2.123162402s" podCreationTimestamp="2026-02-02 10:55:56 +0000 UTC" firstStartedPulling="2026-02-02 10:55:57.479235255 +0000 UTC m=+151.816336283" lastFinishedPulling="2026-02-02 10:56:57.629428736 +0000 UTC m=+211.966529764" observedRunningTime="2026-02-02 10:56:58.120387184 +0000 UTC m=+212.457488212" watchObservedRunningTime="2026-02-02 10:56:58.123162402 +0000 UTC m=+212.460263440" Feb 02 10:57:00 crc kubenswrapper[4838]: I0202 10:57:00.118137 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pl5pl" event={"ID":"193925e0-1419-444b-9e75-ed7371081181","Type":"ContainerStarted","Data":"a5b683382415401dafe8670fcc9267e53809e00f5090c75867acf5f19006ce41"} Feb 02 10:57:01 crc kubenswrapper[4838]: I0202 10:57:01.130572 4838 generic.go:334] "Generic (PLEG): container finished" podID="193925e0-1419-444b-9e75-ed7371081181" containerID="a5b683382415401dafe8670fcc9267e53809e00f5090c75867acf5f19006ce41" exitCode=0 Feb 02 10:57:01 crc kubenswrapper[4838]: I0202 10:57:01.130657 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pl5pl" event={"ID":"193925e0-1419-444b-9e75-ed7371081181","Type":"ContainerDied","Data":"a5b683382415401dafe8670fcc9267e53809e00f5090c75867acf5f19006ce41"} Feb 02 10:57:03 crc kubenswrapper[4838]: I0202 10:57:03.143673 4838 generic.go:334] "Generic (PLEG): container finished" podID="da4cb631-2c1a-4acd-8e10-cf1fbdb099e1" containerID="2bfb7d92833bd258bc226e3fe9c901474cdff71d36f88dcb95fe30e50f353d6e" exitCode=0 Feb 02 10:57:03 crc kubenswrapper[4838]: I0202 10:57:03.143839 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g2rlc" event={"ID":"da4cb631-2c1a-4acd-8e10-cf1fbdb099e1","Type":"ContainerDied","Data":"2bfb7d92833bd258bc226e3fe9c901474cdff71d36f88dcb95fe30e50f353d6e"} Feb 02 10:57:03 crc kubenswrapper[4838]: I0202 10:57:03.149947 4838 generic.go:334] "Generic (PLEG): container finished" podID="f98a7f3b-5730-4469-aef3-188a9755f566" containerID="54672b04a5c34d13586d6761222042345c79e24d51cc22c071f8c47ef4dbe060" exitCode=0 Feb 02 10:57:03 crc kubenswrapper[4838]: I0202 10:57:03.150028 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qwkhj" event={"ID":"f98a7f3b-5730-4469-aef3-188a9755f566","Type":"ContainerDied","Data":"54672b04a5c34d13586d6761222042345c79e24d51cc22c071f8c47ef4dbe060"} Feb 02 10:57:03 crc kubenswrapper[4838]: I0202 10:57:03.155178 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pl5pl" event={"ID":"193925e0-1419-444b-9e75-ed7371081181","Type":"ContainerStarted","Data":"1ccf99eff81a9257eb4c7e3fe871e1b87b43262f9d7034992255a0f579ebe3cf"} Feb 02 10:57:03 crc kubenswrapper[4838]: I0202 10:57:03.157323 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rbh6x" event={"ID":"21ad3b27-d69d-4db6-a1c6-fac312ad582d","Type":"ContainerStarted","Data":"5405f4d10679447c248fc2347f0304c5a77c4451a5ba5b9c0a55f719a3358eda"} Feb 02 10:57:03 crc kubenswrapper[4838]: I0202 10:57:03.210981 4838 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pl5pl" podStartSLOduration=2.117751973 podStartE2EDuration="1m6.210960825s" podCreationTimestamp="2026-02-02 10:55:57 +0000 UTC" firstStartedPulling="2026-02-02 10:55:58.508254135 +0000 UTC m=+152.845355163" lastFinishedPulling="2026-02-02 10:57:02.601462987 +0000 UTC m=+216.938564015" observedRunningTime="2026-02-02 10:57:03.209802073 +0000 UTC m=+217.546903101" watchObservedRunningTime="2026-02-02 10:57:03.210960825 +0000 UTC m=+217.548061843" Feb 02 10:57:04 crc kubenswrapper[4838]: I0202 10:57:04.165413 4838 generic.go:334] "Generic (PLEG): container finished" podID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" containerID="5405f4d10679447c248fc2347f0304c5a77c4451a5ba5b9c0a55f719a3358eda" exitCode=0 Feb 02 10:57:04 crc kubenswrapper[4838]: I0202 10:57:04.165471 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rbh6x" event={"ID":"21ad3b27-d69d-4db6-a1c6-fac312ad582d","Type":"ContainerDied","Data":"5405f4d10679447c248fc2347f0304c5a77c4451a5ba5b9c0a55f719a3358eda"} Feb 02 10:57:05 crc kubenswrapper[4838]: I0202 10:57:05.004154 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ntc2n" Feb 02 10:57:05 crc kubenswrapper[4838]: I0202 10:57:05.046376 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ntc2n" Feb 02 10:57:05 crc kubenswrapper[4838]: I0202 10:57:05.172584 4838 generic.go:334] "Generic (PLEG): container finished" podID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" containerID="7dcab8d17ec0136d70c2085a28f0bedeb6c35bda3dd71c47db3df4d1b252220e" exitCode=0 Feb 02 10:57:05 crc kubenswrapper[4838]: I0202 10:57:05.172658 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tnscg" event={"ID":"bf0d00a9-64a4-46b9-9bc7-8617b0f3692a","Type":"ContainerDied","Data":"7dcab8d17ec0136d70c2085a28f0bedeb6c35bda3dd71c47db3df4d1b252220e"} Feb 02 10:57:06 crc kubenswrapper[4838]: I0202 10:57:06.707540 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nqklr" Feb 02 10:57:06 crc kubenswrapper[4838]: I0202 10:57:06.708125 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nqklr" Feb 02 10:57:06 crc kubenswrapper[4838]: I0202 10:57:06.771913 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nqklr" Feb 02 10:57:07 crc kubenswrapper[4838]: I0202 10:57:07.234710 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nqklr" Feb 02 10:57:07 crc kubenswrapper[4838]: I0202 10:57:07.703502 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pl5pl" Feb 02 10:57:07 crc kubenswrapper[4838]: I0202 10:57:07.703555 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pl5pl" Feb 02 10:57:08 crc kubenswrapper[4838]: I0202 10:57:08.740838 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-pl5pl" podUID="193925e0-1419-444b-9e75-ed7371081181" containerName="registry-server" probeResult="failure" output=< Feb 02 10:57:08 crc kubenswrapper[4838]: timeout: 
failed to connect service ":50051" within 1s Feb 02 10:57:08 crc kubenswrapper[4838]: > Feb 02 10:57:09 crc kubenswrapper[4838]: I0202 10:57:09.195827 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g2rlc" event={"ID":"da4cb631-2c1a-4acd-8e10-cf1fbdb099e1","Type":"ContainerStarted","Data":"5ee06945882e6d23668e1ee0045622d8fc9978e288a285a3f2c1e7e78120e95e"} Feb 02 10:57:09 crc kubenswrapper[4838]: I0202 10:57:09.214724 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-g2rlc" podStartSLOduration=3.846191084 podStartE2EDuration="1m15.214708067s" podCreationTimestamp="2026-02-02 10:55:54 +0000 UTC" firstStartedPulling="2026-02-02 10:55:56.457970282 +0000 UTC m=+150.795071320" lastFinishedPulling="2026-02-02 10:57:07.826487275 +0000 UTC m=+222.163588303" observedRunningTime="2026-02-02 10:57:09.211740104 +0000 UTC m=+223.548841212" watchObservedRunningTime="2026-02-02 10:57:09.214708067 +0000 UTC m=+223.551809095" Feb 02 10:57:10 crc kubenswrapper[4838]: I0202 10:57:10.900013 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nqklr"] Feb 02 10:57:10 crc kubenswrapper[4838]: I0202 10:57:10.900427 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nqklr" podUID="429f9b8c-ff40-4398-811b-01c8702b65b1" containerName="registry-server" containerID="cri-o://76fa6a90ee4fa2bf406a2b7af40260b5e1d58c811e1183e422bb4cf2e9ee42b9" gracePeriod=2 Feb 02 10:57:12 crc kubenswrapper[4838]: I0202 10:57:12.213560 4838 generic.go:334] "Generic (PLEG): container finished" podID="429f9b8c-ff40-4398-811b-01c8702b65b1" containerID="76fa6a90ee4fa2bf406a2b7af40260b5e1d58c811e1183e422bb4cf2e9ee42b9" exitCode=0 Feb 02 10:57:12 crc kubenswrapper[4838]: I0202 10:57:12.213652 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqklr" event={"ID":"429f9b8c-ff40-4398-811b-01c8702b65b1","Type":"ContainerDied","Data":"76fa6a90ee4fa2bf406a2b7af40260b5e1d58c811e1183e422bb4cf2e9ee42b9"} Feb 02 10:57:13 crc kubenswrapper[4838]: I0202 10:57:13.508293 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-845fd9f844-btlt9"] Feb 02 10:57:13 crc kubenswrapper[4838]: I0202 10:57:13.508701 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" podUID="7c0d0e39-41fa-479b-b19f-9897ffd25de8" containerName="controller-manager" containerID="cri-o://cd0b9f5de765ae91c1bef48b177cb4af2d190d30905ebfef0f72f4786acbf4be" gracePeriod=30 Feb 02 10:57:13 crc kubenswrapper[4838]: I0202 10:57:13.605371 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs"] Feb 02 10:57:13 crc kubenswrapper[4838]: I0202 10:57:13.605952 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs" podUID="19a63aed-487b-4667-8df9-7a3327b27c8f" containerName="route-controller-manager" containerID="cri-o://49766b85beb6c601de9f86bae52f44bd5d16956c8dc977a77f4fa1701b359ee3" gracePeriod=30 Feb 02 10:57:14 crc kubenswrapper[4838]: I0202 10:57:14.188519 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nqklr" Feb 02 10:57:14 crc kubenswrapper[4838]: I0202 10:57:14.246794 4838 generic.go:334] "Generic (PLEG): container finished" podID="7c0d0e39-41fa-479b-b19f-9897ffd25de8" containerID="cd0b9f5de765ae91c1bef48b177cb4af2d190d30905ebfef0f72f4786acbf4be" exitCode=0 Feb 02 10:57:14 crc kubenswrapper[4838]: I0202 10:57:14.246872 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" event={"ID":"7c0d0e39-41fa-479b-b19f-9897ffd25de8","Type":"ContainerDied","Data":"cd0b9f5de765ae91c1bef48b177cb4af2d190d30905ebfef0f72f4786acbf4be"} Feb 02 10:57:14 crc kubenswrapper[4838]: I0202 10:57:14.249105 4838 generic.go:334] "Generic (PLEG): container finished" podID="19a63aed-487b-4667-8df9-7a3327b27c8f" containerID="49766b85beb6c601de9f86bae52f44bd5d16956c8dc977a77f4fa1701b359ee3" exitCode=0 Feb 02 10:57:14 crc kubenswrapper[4838]: I0202 10:57:14.249164 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs" event={"ID":"19a63aed-487b-4667-8df9-7a3327b27c8f","Type":"ContainerDied","Data":"49766b85beb6c601de9f86bae52f44bd5d16956c8dc977a77f4fa1701b359ee3"} Feb 02 10:57:14 crc kubenswrapper[4838]: I0202 10:57:14.251475 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqklr" event={"ID":"429f9b8c-ff40-4398-811b-01c8702b65b1","Type":"ContainerDied","Data":"7349a65de5361056c4d484831e83d02c940ee3c358535f0a737ced193e141d03"} Feb 02 10:57:14 crc kubenswrapper[4838]: I0202 10:57:14.251535 4838 scope.go:117] "RemoveContainer" containerID="76fa6a90ee4fa2bf406a2b7af40260b5e1d58c811e1183e422bb4cf2e9ee42b9" Feb 02 10:57:14 crc kubenswrapper[4838]: I0202 10:57:14.251603 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nqklr" Feb 02 10:57:14 crc kubenswrapper[4838]: I0202 10:57:14.300850 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/429f9b8c-ff40-4398-811b-01c8702b65b1-utilities\") pod \"429f9b8c-ff40-4398-811b-01c8702b65b1\" (UID: \"429f9b8c-ff40-4398-811b-01c8702b65b1\") " Feb 02 10:57:14 crc kubenswrapper[4838]: I0202 10:57:14.300896 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/429f9b8c-ff40-4398-811b-01c8702b65b1-catalog-content\") pod \"429f9b8c-ff40-4398-811b-01c8702b65b1\" (UID: \"429f9b8c-ff40-4398-811b-01c8702b65b1\") " Feb 02 10:57:14 crc kubenswrapper[4838]: I0202 10:57:14.300944 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nprlb\" (UniqueName: \"kubernetes.io/projected/429f9b8c-ff40-4398-811b-01c8702b65b1-kube-api-access-nprlb\") pod \"429f9b8c-ff40-4398-811b-01c8702b65b1\" (UID: \"429f9b8c-ff40-4398-811b-01c8702b65b1\") " Feb 02 10:57:14 crc kubenswrapper[4838]: I0202 10:57:14.301722 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/429f9b8c-ff40-4398-811b-01c8702b65b1-utilities" (OuterVolumeSpecName: "utilities") pod "429f9b8c-ff40-4398-811b-01c8702b65b1" (UID: "429f9b8c-ff40-4398-811b-01c8702b65b1"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 10:57:14 crc kubenswrapper[4838]: I0202 10:57:14.313858 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/429f9b8c-ff40-4398-811b-01c8702b65b1-kube-api-access-nprlb" (OuterVolumeSpecName: "kube-api-access-nprlb") pod "429f9b8c-ff40-4398-811b-01c8702b65b1" (UID: "429f9b8c-ff40-4398-811b-01c8702b65b1"). InnerVolumeSpecName "kube-api-access-nprlb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:57:14 crc kubenswrapper[4838]: I0202 10:57:14.334095 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/429f9b8c-ff40-4398-811b-01c8702b65b1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "429f9b8c-ff40-4398-811b-01c8702b65b1" (UID: "429f9b8c-ff40-4398-811b-01c8702b65b1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 10:57:14 crc kubenswrapper[4838]: I0202 10:57:14.402871 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/429f9b8c-ff40-4398-811b-01c8702b65b1-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:14 crc kubenswrapper[4838]: I0202 10:57:14.402921 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/429f9b8c-ff40-4398-811b-01c8702b65b1-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:14 crc kubenswrapper[4838]: I0202 10:57:14.402948 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nprlb\" (UniqueName: \"kubernetes.io/projected/429f9b8c-ff40-4398-811b-01c8702b65b1-kube-api-access-nprlb\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:14 crc kubenswrapper[4838]: I0202 10:57:14.474818 4838 scope.go:117] "RemoveContainer" containerID="ffea144da496d452edd1e73399d93456304604e5349c5eff53ae950e478716b1" Feb 02 10:57:14 crc kubenswrapper[4838]: I0202 10:57:14.518338 4838 scope.go:117] "RemoveContainer" containerID="aa6d71a7740a3354d2592a86b8ae8b11e260c3c02b877f7ca362b96f77e89f7f" Feb 02 10:57:14 crc kubenswrapper[4838]: I0202 10:57:14.621990 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nqklr"] Feb 02 10:57:14 crc kubenswrapper[4838]: I0202 10:57:14.628403 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nqklr"] Feb 02 10:57:14 crc kubenswrapper[4838]: I0202 10:57:14.920747 4838 patch_prober.go:28] interesting pod/controller-manager-845fd9f844-btlt9 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.56:8443/healthz\": dial tcp 10.217.0.56:8443: connect: connection refused" start-of-body= Feb 02 10:57:14 crc kubenswrapper[4838]: I0202 10:57:14.920817 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" podUID="7c0d0e39-41fa-479b-b19f-9897ffd25de8" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.56:8443/healthz\": dial tcp 10.217.0.56:8443: connect: connection refused" Feb 02 10:57:14 crc kubenswrapper[4838]: I0202 10:57:14.960837 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-g2rlc" Feb 02 10:57:14 crc kubenswrapper[4838]: I0202 10:57:14.960924 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/community-operators-g2rlc" Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.009495 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-g2rlc" Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.259357 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rbh6x" event={"ID":"21ad3b27-d69d-4db6-a1c6-fac312ad582d","Type":"ContainerStarted","Data":"90cc1e5c3c847d7308b5f92754f2bcd257d5aa847e6bc2f7927ed934d636743c"} Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.261819 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tnscg" event={"ID":"bf0d00a9-64a4-46b9-9bc7-8617b0f3692a","Type":"ContainerStarted","Data":"e3c356ae730bee9d068384a6a605b708810afff935b31f6cfb83a350d9effaa5"} Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.319796 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-g2rlc" Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.429563 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.429634 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.429686 4838 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.430965 4838 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef"} pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.431065 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" containerID="cri-o://f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef" gracePeriod=600 Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.788061 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs" Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.797717 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.927808 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/19a63aed-487b-4667-8df9-7a3327b27c8f-client-ca\") pod \"19a63aed-487b-4667-8df9-7a3327b27c8f\" (UID: \"19a63aed-487b-4667-8df9-7a3327b27c8f\") " Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.927852 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19a63aed-487b-4667-8df9-7a3327b27c8f-serving-cert\") pod \"19a63aed-487b-4667-8df9-7a3327b27c8f\" (UID: \"19a63aed-487b-4667-8df9-7a3327b27c8f\") " Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.927893 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7c0d0e39-41fa-479b-b19f-9897ffd25de8-proxy-ca-bundles\") pod \"7c0d0e39-41fa-479b-b19f-9897ffd25de8\" (UID: \"7c0d0e39-41fa-479b-b19f-9897ffd25de8\") " Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.927932 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c0d0e39-41fa-479b-b19f-9897ffd25de8-serving-cert\") pod \"7c0d0e39-41fa-479b-b19f-9897ffd25de8\" (UID: \"7c0d0e39-41fa-479b-b19f-9897ffd25de8\") " Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.927953 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4b7qb\" (UniqueName: \"kubernetes.io/projected/19a63aed-487b-4667-8df9-7a3327b27c8f-kube-api-access-4b7qb\") pod \"19a63aed-487b-4667-8df9-7a3327b27c8f\" (UID: \"19a63aed-487b-4667-8df9-7a3327b27c8f\") " Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.928122 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7c0d0e39-41fa-479b-b19f-9897ffd25de8-client-ca\") pod \"7c0d0e39-41fa-479b-b19f-9897ffd25de8\" (UID: \"7c0d0e39-41fa-479b-b19f-9897ffd25de8\") " Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.929022 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lf7ks\" (UniqueName: \"kubernetes.io/projected/7c0d0e39-41fa-479b-b19f-9897ffd25de8-kube-api-access-lf7ks\") pod \"7c0d0e39-41fa-479b-b19f-9897ffd25de8\" (UID: \"7c0d0e39-41fa-479b-b19f-9897ffd25de8\") " Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.929046 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c0d0e39-41fa-479b-b19f-9897ffd25de8-config\") pod \"7c0d0e39-41fa-479b-b19f-9897ffd25de8\" (UID: \"7c0d0e39-41fa-479b-b19f-9897ffd25de8\") " Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.928724 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19a63aed-487b-4667-8df9-7a3327b27c8f-client-ca" (OuterVolumeSpecName: "client-ca") pod "19a63aed-487b-4667-8df9-7a3327b27c8f" (UID: "19a63aed-487b-4667-8df9-7a3327b27c8f"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.929079 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19a63aed-487b-4667-8df9-7a3327b27c8f-config\") pod \"19a63aed-487b-4667-8df9-7a3327b27c8f\" (UID: \"19a63aed-487b-4667-8df9-7a3327b27c8f\") " Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.928720 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c0d0e39-41fa-479b-b19f-9897ffd25de8-client-ca" (OuterVolumeSpecName: "client-ca") pod "7c0d0e39-41fa-479b-b19f-9897ffd25de8" (UID: "7c0d0e39-41fa-479b-b19f-9897ffd25de8"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.928734 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c0d0e39-41fa-479b-b19f-9897ffd25de8-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7c0d0e39-41fa-479b-b19f-9897ffd25de8" (UID: "7c0d0e39-41fa-479b-b19f-9897ffd25de8"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.929350 4838 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/19a63aed-487b-4667-8df9-7a3327b27c8f-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.929381 4838 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7c0d0e39-41fa-479b-b19f-9897ffd25de8-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.929397 4838 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7c0d0e39-41fa-479b-b19f-9897ffd25de8-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.929423 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19a63aed-487b-4667-8df9-7a3327b27c8f-config" (OuterVolumeSpecName: "config") pod "19a63aed-487b-4667-8df9-7a3327b27c8f" (UID: "19a63aed-487b-4667-8df9-7a3327b27c8f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.929770 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c0d0e39-41fa-479b-b19f-9897ffd25de8-config" (OuterVolumeSpecName: "config") pod "7c0d0e39-41fa-479b-b19f-9897ffd25de8" (UID: "7c0d0e39-41fa-479b-b19f-9897ffd25de8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.932658 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19a63aed-487b-4667-8df9-7a3327b27c8f-kube-api-access-4b7qb" (OuterVolumeSpecName: "kube-api-access-4b7qb") pod "19a63aed-487b-4667-8df9-7a3327b27c8f" (UID: "19a63aed-487b-4667-8df9-7a3327b27c8f"). InnerVolumeSpecName "kube-api-access-4b7qb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.932700 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19a63aed-487b-4667-8df9-7a3327b27c8f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "19a63aed-487b-4667-8df9-7a3327b27c8f" (UID: "19a63aed-487b-4667-8df9-7a3327b27c8f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.932846 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c0d0e39-41fa-479b-b19f-9897ffd25de8-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7c0d0e39-41fa-479b-b19f-9897ffd25de8" (UID: "7c0d0e39-41fa-479b-b19f-9897ffd25de8"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:57:15 crc kubenswrapper[4838]: I0202 10:57:15.933725 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c0d0e39-41fa-479b-b19f-9897ffd25de8-kube-api-access-lf7ks" (OuterVolumeSpecName: "kube-api-access-lf7ks") pod "7c0d0e39-41fa-479b-b19f-9897ffd25de8" (UID: "7c0d0e39-41fa-479b-b19f-9897ffd25de8"). InnerVolumeSpecName "kube-api-access-lf7ks". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.029885 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c0d0e39-41fa-479b-b19f-9897ffd25de8-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.029931 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19a63aed-487b-4667-8df9-7a3327b27c8f-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.029945 4838 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19a63aed-487b-4667-8df9-7a3327b27c8f-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.029958 4838 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c0d0e39-41fa-479b-b19f-9897ffd25de8-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.029970 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4b7qb\" (UniqueName: \"kubernetes.io/projected/19a63aed-487b-4667-8df9-7a3327b27c8f-kube-api-access-4b7qb\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.029985 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lf7ks\" (UniqueName: \"kubernetes.io/projected/7c0d0e39-41fa-479b-b19f-9897ffd25de8-kube-api-access-lf7ks\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.280201 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qwkhj" event={"ID":"f98a7f3b-5730-4469-aef3-188a9755f566","Type":"ContainerStarted","Data":"202b4aef59408ecb2a0f098f89c87e44fdf75bd3760f3ef1ae81e786ed0da8dc"} Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.283200 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.284673 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-845fd9f844-btlt9" event={"ID":"7c0d0e39-41fa-479b-b19f-9897ffd25de8","Type":"ContainerDied","Data":"ed0461f1c5df2da4cf5fa77027ea9a5b75521d3a7572b79afad6a338a495b309"} Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.284769 4838 scope.go:117] "RemoveContainer" containerID="cd0b9f5de765ae91c1bef48b177cb4af2d190d30905ebfef0f72f4786acbf4be" Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.291533 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs" event={"ID":"19a63aed-487b-4667-8df9-7a3327b27c8f","Type":"ContainerDied","Data":"8102ae2c308aa39008cff126706e9498d589b6831c663a74a118616e61310c81"} Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.291560 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs" Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.299608 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-g2rlc"] Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.303266 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qwkhj" podStartSLOduration=4.371472433 podStartE2EDuration="1m19.303245147s" podCreationTimestamp="2026-02-02 10:55:57 +0000 UTC" firstStartedPulling="2026-02-02 10:55:59.543183449 +0000 UTC m=+153.880284477" lastFinishedPulling="2026-02-02 10:57:14.474956143 +0000 UTC m=+228.812057191" observedRunningTime="2026-02-02 10:57:16.301963251 +0000 UTC m=+230.639064289" watchObservedRunningTime="2026-02-02 10:57:16.303245147 +0000 UTC m=+230.640346175" Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.308162 4838 generic.go:334] "Generic (PLEG): container finished" podID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerID="f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef" exitCode=0 Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.309060 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" event={"ID":"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce","Type":"ContainerDied","Data":"f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef"} Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.309132 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" event={"ID":"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce","Type":"ContainerStarted","Data":"788680db6347a60c2bbc6d7b7baac3c8eb4876b9f269eb890bcb4237052aee16"} Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.312485 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tnscg" Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.312773 4838 scope.go:117] "RemoveContainer" containerID="49766b85beb6c601de9f86bae52f44bd5d16956c8dc977a77f4fa1701b359ee3" Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.312861 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tnscg" Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.357169 4838 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tnscg" podStartSLOduration=4.357843925 podStartE2EDuration="1m21.357151668s" podCreationTimestamp="2026-02-02 10:55:55 +0000 UTC" firstStartedPulling="2026-02-02 10:55:57.475495656 +0000 UTC m=+151.812596684" lastFinishedPulling="2026-02-02 10:57:14.474803399 +0000 UTC m=+228.811904427" observedRunningTime="2026-02-02 10:57:16.335754428 +0000 UTC m=+230.672855456" watchObservedRunningTime="2026-02-02 10:57:16.357151668 +0000 UTC m=+230.694252696" Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.380385 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rbh6x" podStartSLOduration=5.595321748 podStartE2EDuration="1m22.380361798s" podCreationTimestamp="2026-02-02 10:55:54 +0000 UTC" firstStartedPulling="2026-02-02 10:55:57.489543118 +0000 UTC m=+151.826644146" lastFinishedPulling="2026-02-02 10:57:14.274583168 +0000 UTC m=+228.611684196" observedRunningTime="2026-02-02 10:57:16.359707159 +0000 UTC m=+230.696808187" watchObservedRunningTime="2026-02-02 10:57:16.380361798 +0000 UTC m=+230.717462836" Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.394807 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-845fd9f844-btlt9"] Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.397427 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-845fd9f844-btlt9"] Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.404434 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs"] Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.406960 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7cb8f75548-7jljs"] Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.513753 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19a63aed-487b-4667-8df9-7a3327b27c8f" path="/var/lib/kubelet/pods/19a63aed-487b-4667-8df9-7a3327b27c8f/volumes" Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.514221 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="429f9b8c-ff40-4398-811b-01c8702b65b1" path="/var/lib/kubelet/pods/429f9b8c-ff40-4398-811b-01c8702b65b1/volumes" Feb 02 10:57:16 crc kubenswrapper[4838]: I0202 10:57:16.514827 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c0d0e39-41fa-479b-b19f-9897ffd25de8" path="/var/lib/kubelet/pods/7c0d0e39-41fa-479b-b19f-9897ffd25de8/volumes" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.181923 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg"] Feb 02 10:57:17 crc kubenswrapper[4838]: E0202 10:57:17.182214 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c0d0e39-41fa-479b-b19f-9897ffd25de8" containerName="controller-manager" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.182227 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c0d0e39-41fa-479b-b19f-9897ffd25de8" containerName="controller-manager" Feb 02 10:57:17 crc kubenswrapper[4838]: E0202 10:57:17.182237 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="429f9b8c-ff40-4398-811b-01c8702b65b1" containerName="extract-utilities" Feb 02 
10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.182244 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="429f9b8c-ff40-4398-811b-01c8702b65b1" containerName="extract-utilities" Feb 02 10:57:17 crc kubenswrapper[4838]: E0202 10:57:17.182253 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19a63aed-487b-4667-8df9-7a3327b27c8f" containerName="route-controller-manager" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.182261 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="19a63aed-487b-4667-8df9-7a3327b27c8f" containerName="route-controller-manager" Feb 02 10:57:17 crc kubenswrapper[4838]: E0202 10:57:17.182271 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86017b1b-8411-4ee3-bffd-d71c26b535fb" containerName="pruner" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.182276 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="86017b1b-8411-4ee3-bffd-d71c26b535fb" containerName="pruner" Feb 02 10:57:17 crc kubenswrapper[4838]: E0202 10:57:17.182286 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="429f9b8c-ff40-4398-811b-01c8702b65b1" containerName="extract-content" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.182294 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="429f9b8c-ff40-4398-811b-01c8702b65b1" containerName="extract-content" Feb 02 10:57:17 crc kubenswrapper[4838]: E0202 10:57:17.182303 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5a4810d-7433-4fe3-a231-a9ba2a238ce1" containerName="pruner" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.182308 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5a4810d-7433-4fe3-a231-a9ba2a238ce1" containerName="pruner" Feb 02 10:57:17 crc kubenswrapper[4838]: E0202 10:57:17.182317 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="429f9b8c-ff40-4398-811b-01c8702b65b1" containerName="registry-server" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.182322 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="429f9b8c-ff40-4398-811b-01c8702b65b1" containerName="registry-server" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.182456 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="19a63aed-487b-4667-8df9-7a3327b27c8f" containerName="route-controller-manager" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.182469 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5a4810d-7433-4fe3-a231-a9ba2a238ce1" containerName="pruner" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.182476 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="86017b1b-8411-4ee3-bffd-d71c26b535fb" containerName="pruner" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.182484 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c0d0e39-41fa-479b-b19f-9897ffd25de8" containerName="controller-manager" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.182490 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="429f9b8c-ff40-4398-811b-01c8702b65b1" containerName="registry-server" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.182959 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.185267 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.186775 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn"] Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.186845 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.187151 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.187259 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.187397 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.187841 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.188104 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.189683 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.190055 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.190488 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.190827 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.190992 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.191185 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.205383 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.205729 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg"] Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.208890 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn"] Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.317712 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-g2rlc" 
podUID="da4cb631-2c1a-4acd-8e10-cf1fbdb099e1" containerName="registry-server" containerID="cri-o://5ee06945882e6d23668e1ee0045622d8fc9978e288a285a3f2c1e7e78120e95e" gracePeriod=2 Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.347900 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cf2e0d7-4a0e-492d-aeb2-60855bdd4328-config\") pod \"route-controller-manager-766676dc96-d44sg\" (UID: \"7cf2e0d7-4a0e-492d-aeb2-60855bdd4328\") " pod="openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.347977 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-proxy-ca-bundles\") pod \"controller-manager-7f7d77ff9f-8p4qn\" (UID: \"2b9efaa6-38f5-4e59-99b5-c16e0d93755e\") " pod="openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.348045 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhrsj\" (UniqueName: \"kubernetes.io/projected/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-kube-api-access-bhrsj\") pod \"controller-manager-7f7d77ff9f-8p4qn\" (UID: \"2b9efaa6-38f5-4e59-99b5-c16e0d93755e\") " pod="openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.348091 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7cf2e0d7-4a0e-492d-aeb2-60855bdd4328-client-ca\") pod \"route-controller-manager-766676dc96-d44sg\" (UID: \"7cf2e0d7-4a0e-492d-aeb2-60855bdd4328\") " pod="openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.348121 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-serving-cert\") pod \"controller-manager-7f7d77ff9f-8p4qn\" (UID: \"2b9efaa6-38f5-4e59-99b5-c16e0d93755e\") " pod="openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.348152 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rzp7\" (UniqueName: \"kubernetes.io/projected/7cf2e0d7-4a0e-492d-aeb2-60855bdd4328-kube-api-access-9rzp7\") pod \"route-controller-manager-766676dc96-d44sg\" (UID: \"7cf2e0d7-4a0e-492d-aeb2-60855bdd4328\") " pod="openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.348240 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7cf2e0d7-4a0e-492d-aeb2-60855bdd4328-serving-cert\") pod \"route-controller-manager-766676dc96-d44sg\" (UID: \"7cf2e0d7-4a0e-492d-aeb2-60855bdd4328\") " pod="openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.348270 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-config\") pod \"controller-manager-7f7d77ff9f-8p4qn\" (UID: \"2b9efaa6-38f5-4e59-99b5-c16e0d93755e\") " pod="openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.348332 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-client-ca\") pod \"controller-manager-7f7d77ff9f-8p4qn\" (UID: \"2b9efaa6-38f5-4e59-99b5-c16e0d93755e\") " pod="openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.372712 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-tnscg" podUID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" containerName="registry-server" probeResult="failure" output=< Feb 02 10:57:17 crc kubenswrapper[4838]: timeout: failed to connect service ":50051" within 1s Feb 02 10:57:17 crc kubenswrapper[4838]: > Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.449001 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7cf2e0d7-4a0e-492d-aeb2-60855bdd4328-serving-cert\") pod \"route-controller-manager-766676dc96-d44sg\" (UID: \"7cf2e0d7-4a0e-492d-aeb2-60855bdd4328\") " pod="openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.449066 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-config\") pod \"controller-manager-7f7d77ff9f-8p4qn\" (UID: \"2b9efaa6-38f5-4e59-99b5-c16e0d93755e\") " pod="openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.449133 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-client-ca\") pod \"controller-manager-7f7d77ff9f-8p4qn\" (UID: \"2b9efaa6-38f5-4e59-99b5-c16e0d93755e\") " pod="openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.449187 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cf2e0d7-4a0e-492d-aeb2-60855bdd4328-config\") pod \"route-controller-manager-766676dc96-d44sg\" (UID: \"7cf2e0d7-4a0e-492d-aeb2-60855bdd4328\") " pod="openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.449226 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-proxy-ca-bundles\") pod \"controller-manager-7f7d77ff9f-8p4qn\" (UID: \"2b9efaa6-38f5-4e59-99b5-c16e0d93755e\") " pod="openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.449287 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhrsj\" (UniqueName: \"kubernetes.io/projected/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-kube-api-access-bhrsj\") pod \"controller-manager-7f7d77ff9f-8p4qn\" (UID: \"2b9efaa6-38f5-4e59-99b5-c16e0d93755e\") " 
pod="openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.449329 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7cf2e0d7-4a0e-492d-aeb2-60855bdd4328-client-ca\") pod \"route-controller-manager-766676dc96-d44sg\" (UID: \"7cf2e0d7-4a0e-492d-aeb2-60855bdd4328\") " pod="openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.449359 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-serving-cert\") pod \"controller-manager-7f7d77ff9f-8p4qn\" (UID: \"2b9efaa6-38f5-4e59-99b5-c16e0d93755e\") " pod="openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.449388 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rzp7\" (UniqueName: \"kubernetes.io/projected/7cf2e0d7-4a0e-492d-aeb2-60855bdd4328-kube-api-access-9rzp7\") pod \"route-controller-manager-766676dc96-d44sg\" (UID: \"7cf2e0d7-4a0e-492d-aeb2-60855bdd4328\") " pod="openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.455218 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7cf2e0d7-4a0e-492d-aeb2-60855bdd4328-client-ca\") pod \"route-controller-manager-766676dc96-d44sg\" (UID: \"7cf2e0d7-4a0e-492d-aeb2-60855bdd4328\") " pod="openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.455764 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cf2e0d7-4a0e-492d-aeb2-60855bdd4328-config\") pod \"route-controller-manager-766676dc96-d44sg\" (UID: \"7cf2e0d7-4a0e-492d-aeb2-60855bdd4328\") " pod="openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.455960 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-client-ca\") pod \"controller-manager-7f7d77ff9f-8p4qn\" (UID: \"2b9efaa6-38f5-4e59-99b5-c16e0d93755e\") " pod="openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.456312 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-proxy-ca-bundles\") pod \"controller-manager-7f7d77ff9f-8p4qn\" (UID: \"2b9efaa6-38f5-4e59-99b5-c16e0d93755e\") " pod="openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.456518 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7cf2e0d7-4a0e-492d-aeb2-60855bdd4328-serving-cert\") pod \"route-controller-manager-766676dc96-d44sg\" (UID: \"7cf2e0d7-4a0e-492d-aeb2-60855bdd4328\") " pod="openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.457331 4838 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-config\") pod \"controller-manager-7f7d77ff9f-8p4qn\" (UID: \"2b9efaa6-38f5-4e59-99b5-c16e0d93755e\") " pod="openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.460787 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-serving-cert\") pod \"controller-manager-7f7d77ff9f-8p4qn\" (UID: \"2b9efaa6-38f5-4e59-99b5-c16e0d93755e\") " pod="openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.474061 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rzp7\" (UniqueName: \"kubernetes.io/projected/7cf2e0d7-4a0e-492d-aeb2-60855bdd4328-kube-api-access-9rzp7\") pod \"route-controller-manager-766676dc96-d44sg\" (UID: \"7cf2e0d7-4a0e-492d-aeb2-60855bdd4328\") " pod="openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.479495 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhrsj\" (UniqueName: \"kubernetes.io/projected/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-kube-api-access-bhrsj\") pod \"controller-manager-7f7d77ff9f-8p4qn\" (UID: \"2b9efaa6-38f5-4e59-99b5-c16e0d93755e\") " pod="openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.507944 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg" Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.516555 4838 util.go:30] "No sandbox for pod can be found. 
Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.750980 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pl5pl"
Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.791331 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pl5pl"
Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.845599 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg"]
Feb 02 10:57:17 crc kubenswrapper[4838]: W0202 10:57:17.861608 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7cf2e0d7_4a0e_492d_aeb2_60855bdd4328.slice/crio-d20a23227de8cde10c11f3fe22747925820b13551149c5d93aa190bbfe666b1c WatchSource:0}: Error finding container d20a23227de8cde10c11f3fe22747925820b13551149c5d93aa190bbfe666b1c: Status 404 returned error can't find the container with id d20a23227de8cde10c11f3fe22747925820b13551149c5d93aa190bbfe666b1c
Feb 02 10:57:17 crc kubenswrapper[4838]: I0202 10:57:17.976207 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn"]
Feb 02 10:57:17 crc kubenswrapper[4838]: W0202 10:57:17.986673 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b9efaa6_38f5_4e59_99b5_c16e0d93755e.slice/crio-b0513cdb80f7cf5a9af336e896f27ca31f2f88ebc76667f662d15deb51e81538 WatchSource:0}: Error finding container b0513cdb80f7cf5a9af336e896f27ca31f2f88ebc76667f662d15deb51e81538: Status 404 returned error can't find the container with id b0513cdb80f7cf5a9af336e896f27ca31f2f88ebc76667f662d15deb51e81538
Feb 02 10:57:18 crc kubenswrapper[4838]: I0202 10:57:18.099997 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qwkhj"
Feb 02 10:57:18 crc kubenswrapper[4838]: I0202 10:57:18.100499 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qwkhj"
Feb 02 10:57:18 crc kubenswrapper[4838]: I0202 10:57:18.324719 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn" event={"ID":"2b9efaa6-38f5-4e59-99b5-c16e0d93755e","Type":"ContainerStarted","Data":"a3dedabece85238e5b3773b44421da86c937bfd4bec3d7a726d17b23808a8db4"}
Feb 02 10:57:18 crc kubenswrapper[4838]: I0202 10:57:18.324773 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn" event={"ID":"2b9efaa6-38f5-4e59-99b5-c16e0d93755e","Type":"ContainerStarted","Data":"b0513cdb80f7cf5a9af336e896f27ca31f2f88ebc76667f662d15deb51e81538"}
Feb 02 10:57:18 crc kubenswrapper[4838]: I0202 10:57:18.324986 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn"
Feb 02 10:57:18 crc kubenswrapper[4838]: I0202 10:57:18.329743 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg" event={"ID":"7cf2e0d7-4a0e-492d-aeb2-60855bdd4328","Type":"ContainerStarted","Data":"2b0d8fedd7e494a993e6019444b9575cadbe949a261b4964fd625dd2522c4bf6"}
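[Annotation] The two cAdvisor warnings above ("Failed to process watch event ... Status 404") are a benign race: the cgroup shows up in the fsnotify watch stream before the new container is registered with the runtime (or after it is already gone), so the lookup fails and the event is dropped. A hedged Go sketch of tolerating that not-found case (error values are illustrative, not cAdvisor's actual API):

    package main

    import (
        "errors"
        "fmt"
    )

    // errNotFound stands in for the runtime's "can't find the container" status.
    var errNotFound = errors.New("container not found")

    func findContainer(id string) error { return errNotFound } // always races, for demonstration

    func processWatchEvent(id string) {
        if err := findContainer(id); errors.Is(err, errNotFound) {
            // Container raced ahead of (or behind) the watch stream: warn and move on.
            fmt.Printf("failed to process watch event for %s: %v\n", id, err)
            return
        }
        // ... otherwise start collecting metrics for the container ...
    }

    func main() { processWatchEvent("d20a23227de8...") }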
Feb 02 10:57:18 crc kubenswrapper[4838]: I0202 10:57:18.329803 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg" event={"ID":"7cf2e0d7-4a0e-492d-aeb2-60855bdd4328","Type":"ContainerStarted","Data":"d20a23227de8cde10c11f3fe22747925820b13551149c5d93aa190bbfe666b1c"}
Feb 02 10:57:18 crc kubenswrapper[4838]: I0202 10:57:18.330637 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg"
Feb 02 10:57:18 crc kubenswrapper[4838]: I0202 10:57:18.330761 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn"
Feb 02 10:57:18 crc kubenswrapper[4838]: I0202 10:57:18.333122 4838 generic.go:334] "Generic (PLEG): container finished" podID="da4cb631-2c1a-4acd-8e10-cf1fbdb099e1" containerID="5ee06945882e6d23668e1ee0045622d8fc9978e288a285a3f2c1e7e78120e95e" exitCode=0
Feb 02 10:57:18 crc kubenswrapper[4838]: I0202 10:57:18.333190 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g2rlc" event={"ID":"da4cb631-2c1a-4acd-8e10-cf1fbdb099e1","Type":"ContainerDied","Data":"5ee06945882e6d23668e1ee0045622d8fc9978e288a285a3f2c1e7e78120e95e"}
Feb 02 10:57:18 crc kubenswrapper[4838]: I0202 10:57:18.344017 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn" podStartSLOduration=5.344000374 podStartE2EDuration="5.344000374s" podCreationTimestamp="2026-02-02 10:57:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:57:18.3427719 +0000 UTC m=+232.679872938" watchObservedRunningTime="2026-02-02 10:57:18.344000374 +0000 UTC m=+232.681101402"
Feb 02 10:57:18 crc kubenswrapper[4838]: I0202 10:57:18.384505 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg" podStartSLOduration=5.384484939 podStartE2EDuration="5.384484939s" podCreationTimestamp="2026-02-02 10:57:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:57:18.380988591 +0000 UTC m=+232.718089619" watchObservedRunningTime="2026-02-02 10:57:18.384484939 +0000 UTC m=+232.721585967"
Feb 02 10:57:18 crc kubenswrapper[4838]: I0202 10:57:18.393667 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-2l7rj"]
Feb 02 10:57:18 crc kubenswrapper[4838]: I0202 10:57:18.724928 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g2rlc"
Feb 02 10:57:18 crc kubenswrapper[4838]: I0202 10:57:18.793998 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg"
Feb 02 10:57:18 crc kubenswrapper[4838]: I0202 10:57:18.868901 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8n2xt\" (UniqueName: \"kubernetes.io/projected/da4cb631-2c1a-4acd-8e10-cf1fbdb099e1-kube-api-access-8n2xt\") pod \"da4cb631-2c1a-4acd-8e10-cf1fbdb099e1\" (UID: \"da4cb631-2c1a-4acd-8e10-cf1fbdb099e1\") "
Feb 02 10:57:18 crc kubenswrapper[4838]: I0202 10:57:18.869020 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da4cb631-2c1a-4acd-8e10-cf1fbdb099e1-catalog-content\") pod \"da4cb631-2c1a-4acd-8e10-cf1fbdb099e1\" (UID: \"da4cb631-2c1a-4acd-8e10-cf1fbdb099e1\") "
Feb 02 10:57:18 crc kubenswrapper[4838]: I0202 10:57:18.869080 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da4cb631-2c1a-4acd-8e10-cf1fbdb099e1-utilities\") pod \"da4cb631-2c1a-4acd-8e10-cf1fbdb099e1\" (UID: \"da4cb631-2c1a-4acd-8e10-cf1fbdb099e1\") "
Feb 02 10:57:18 crc kubenswrapper[4838]: I0202 10:57:18.870181 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da4cb631-2c1a-4acd-8e10-cf1fbdb099e1-utilities" (OuterVolumeSpecName: "utilities") pod "da4cb631-2c1a-4acd-8e10-cf1fbdb099e1" (UID: "da4cb631-2c1a-4acd-8e10-cf1fbdb099e1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 10:57:18 crc kubenswrapper[4838]: I0202 10:57:18.883258 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da4cb631-2c1a-4acd-8e10-cf1fbdb099e1-kube-api-access-8n2xt" (OuterVolumeSpecName: "kube-api-access-8n2xt") pod "da4cb631-2c1a-4acd-8e10-cf1fbdb099e1" (UID: "da4cb631-2c1a-4acd-8e10-cf1fbdb099e1"). InnerVolumeSpecName "kube-api-access-8n2xt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 10:57:18 crc kubenswrapper[4838]: I0202 10:57:18.970460 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da4cb631-2c1a-4acd-8e10-cf1fbdb099e1-utilities\") on node \"crc\" DevicePath \"\""
Feb 02 10:57:18 crc kubenswrapper[4838]: I0202 10:57:18.970506 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8n2xt\" (UniqueName: \"kubernetes.io/projected/da4cb631-2c1a-4acd-8e10-cf1fbdb099e1-kube-api-access-8n2xt\") on node \"crc\" DevicePath \"\""
Feb 02 10:57:18 crc kubenswrapper[4838]: I0202 10:57:18.978036 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da4cb631-2c1a-4acd-8e10-cf1fbdb099e1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "da4cb631-2c1a-4acd-8e10-cf1fbdb099e1" (UID: "da4cb631-2c1a-4acd-8e10-cf1fbdb099e1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 10:57:19 crc kubenswrapper[4838]: I0202 10:57:19.071472 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da4cb631-2c1a-4acd-8e10-cf1fbdb099e1-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:19 crc kubenswrapper[4838]: I0202 10:57:19.152633 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-qwkhj" podUID="f98a7f3b-5730-4469-aef3-188a9755f566" containerName="registry-server" probeResult="failure" output=< Feb 02 10:57:19 crc kubenswrapper[4838]: timeout: failed to connect service ":50051" within 1s Feb 02 10:57:19 crc kubenswrapper[4838]: > Feb 02 10:57:19 crc kubenswrapper[4838]: I0202 10:57:19.339179 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g2rlc" Feb 02 10:57:19 crc kubenswrapper[4838]: I0202 10:57:19.340014 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g2rlc" event={"ID":"da4cb631-2c1a-4acd-8e10-cf1fbdb099e1","Type":"ContainerDied","Data":"0118338384c840f8bec0723a20d4883f5207c5c7993adb147f2be73c28e2067d"} Feb 02 10:57:19 crc kubenswrapper[4838]: I0202 10:57:19.340598 4838 scope.go:117] "RemoveContainer" containerID="5ee06945882e6d23668e1ee0045622d8fc9978e288a285a3f2c1e7e78120e95e" Feb 02 10:57:19 crc kubenswrapper[4838]: I0202 10:57:19.360861 4838 scope.go:117] "RemoveContainer" containerID="2bfb7d92833bd258bc226e3fe9c901474cdff71d36f88dcb95fe30e50f353d6e" Feb 02 10:57:19 crc kubenswrapper[4838]: I0202 10:57:19.363821 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-g2rlc"] Feb 02 10:57:19 crc kubenswrapper[4838]: I0202 10:57:19.372117 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-g2rlc"] Feb 02 10:57:19 crc kubenswrapper[4838]: I0202 10:57:19.381717 4838 scope.go:117] "RemoveContainer" containerID="8603439ecebe2c5e456e6831392539e1ebded39a9d0aa184011c7bfdc4c08abe" Feb 02 10:57:20 crc kubenswrapper[4838]: I0202 10:57:20.518983 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da4cb631-2c1a-4acd-8e10-cf1fbdb099e1" path="/var/lib/kubelet/pods/da4cb631-2c1a-4acd-8e10-cf1fbdb099e1/volumes" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.470130 4838 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 02 10:57:23 crc kubenswrapper[4838]: E0202 10:57:23.470953 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da4cb631-2c1a-4acd-8e10-cf1fbdb099e1" containerName="extract-content" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.470973 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="da4cb631-2c1a-4acd-8e10-cf1fbdb099e1" containerName="extract-content" Feb 02 10:57:23 crc kubenswrapper[4838]: E0202 10:57:23.471017 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da4cb631-2c1a-4acd-8e10-cf1fbdb099e1" containerName="extract-utilities" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.471029 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="da4cb631-2c1a-4acd-8e10-cf1fbdb099e1" containerName="extract-utilities" Feb 02 10:57:23 crc kubenswrapper[4838]: E0202 10:57:23.471047 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da4cb631-2c1a-4acd-8e10-cf1fbdb099e1" 
containerName="registry-server" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.471060 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="da4cb631-2c1a-4acd-8e10-cf1fbdb099e1" containerName="registry-server" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.471301 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="da4cb631-2c1a-4acd-8e10-cf1fbdb099e1" containerName="registry-server" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.472013 4838 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.472249 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.472455 4838 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.472679 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a" gracePeriod=15 Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.472739 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06" gracePeriod=15 Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.472811 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740" gracePeriod=15 Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.472836 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d" gracePeriod=15 Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.472845 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400" gracePeriod=15 Feb 02 10:57:23 crc kubenswrapper[4838]: E0202 10:57:23.473101 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.473151 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 10:57:23 crc kubenswrapper[4838]: E0202 10:57:23.473177 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.473196 4838 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Feb 02 10:57:23 crc kubenswrapper[4838]: E0202 10:57:23.473228 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.473246 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Feb 02 10:57:23 crc kubenswrapper[4838]: E0202 10:57:23.473276 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.473295 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Feb 02 10:57:23 crc kubenswrapper[4838]: E0202 10:57:23.473323 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.473340 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Feb 02 10:57:23 crc kubenswrapper[4838]: E0202 10:57:23.473367 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.473386 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 10:57:23 crc kubenswrapper[4838]: E0202 10:57:23.473408 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.473425 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.473794 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.473832 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.473859 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.473882 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.473905 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.473923 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.544420 4838 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.544519 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.544699 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.544768 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.545303 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.545544 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.545759 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.545822 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.564755 4838 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Readiness probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]log ok Feb 02 10:57:23 crc kubenswrapper[4838]: 
[+]api-openshift-apiserver-available ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]api-openshift-oauth-apiserver-available ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]informer-sync ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/openshift.io-api-request-count-filter ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/openshift.io-startkubeinformers ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/start-apiserver-admission-initializer ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/generic-apiserver-start-informers ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/priority-and-fairness-config-consumer ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/priority-and-fairness-filter ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/storage-object-count-tracker-hook ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/start-apiextensions-informers ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/start-apiextensions-controllers ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/crd-informer-synced ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/start-system-namespaces-controller ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/start-cluster-authentication-info-controller ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/start-legacy-token-tracking-controller ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/start-service-ip-repair-controllers ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/rbac/bootstrap-roles ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/scheduling/bootstrap-system-priority-classes ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/priority-and-fairness-config-producer ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/bootstrap-controller ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/aggregator-reload-proxy-client-cert ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/start-kube-aggregator-informers ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/apiservice-status-local-available-controller ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/apiservice-status-remote-available-controller ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/apiservice-registration-controller ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/apiservice-wait-for-first-sync ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/apiservice-discovery-controller ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/kube-apiserver-autoregistration ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]autoregister-completion ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/apiservice-openapi-controller ok Feb 02 10:57:23 crc kubenswrapper[4838]: [+]poststarthook/apiservice-openapiv3-controller ok Feb 02 10:57:23 crc kubenswrapper[4838]: [-]shutdown failed: reason withheld Feb 02 10:57:23 crc 
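[Annotation] The /readyz output above is an all-or-nothing aggregation: every [+] check passed, but the single [-]shutdown check fails (expected, since the kubelet just began terminating this kube-apiserver), so the endpoint returns HTTP 500 and the kubelet records a readiness failure. A minimal Go sketch of the probing side, where any non-2xx/3xx status is treated as failure (URL is illustrative):

    package main

    import (
        "fmt"
        "io"
        "net/http"
        "time"
    )

    // probeReadyz returns nil only for a 2xx/3xx response, mirroring HTTP probe semantics.
    func probeReadyz(url string) error {
        client := &http.Client{Timeout: time.Second}
        resp, err := client.Get(url)
        if err != nil {
            return err
        }
        defer resp.Body.Close()
        body, _ := io.ReadAll(io.LimitReader(resp.Body, 4096)) // keep the check list for the log
        if resp.StatusCode < 200 || resp.StatusCode >= 400 {
            return fmt.Errorf("HTTP probe failed with statuscode: %d\n%s", resp.StatusCode, body)
        }
        return nil
    }

    func main() {
        if err := probeReadyz("https://localhost:6443/readyz"); err != nil {
            fmt.Println("Probe failed:", err)
        }
    }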
Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.564843 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.647232 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.647292 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.647318 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.647375 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.647405 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.647441 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.647468 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.647488 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.647561 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.647608 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.647670 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.647703 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.647733 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.647760 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.647790 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 10:57:23 crc kubenswrapper[4838]: I0202 10:57:23.647820 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 10:57:24 crc kubenswrapper[4838]: I0202 10:57:24.376105 4838 generic.go:334] "Generic (PLEG): container finished" podID="2a16c769-012d-4a30-b9db-5629ed018ef8" containerID="7a4ab72405ec81f4fdbbcd867ddfdab8f8f3e8d9a8f3e00172852c3acee8c747" exitCode=0 Feb 02 10:57:24 crc kubenswrapper[4838]: I0202 10:57:24.376238 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"2a16c769-012d-4a30-b9db-5629ed018ef8","Type":"ContainerDied","Data":"7a4ab72405ec81f4fdbbcd867ddfdab8f8f3e8d9a8f3e00172852c3acee8c747"} Feb 02 
Feb 02 10:57:24 crc kubenswrapper[4838]: I0202 10:57:24.379243 4838 status_manager.go:851] "Failed to get status for pod" podUID="2a16c769-012d-4a30-b9db-5629ed018ef8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.241:6443: connect: connection refused"
Feb 02 10:57:24 crc kubenswrapper[4838]: I0202 10:57:24.379688 4838 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.241:6443: connect: connection refused"
Feb 02 10:57:24 crc kubenswrapper[4838]: I0202 10:57:24.380886 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Feb 02 10:57:24 crc kubenswrapper[4838]: I0202 10:57:24.383605 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Feb 02 10:57:24 crc kubenswrapper[4838]: I0202 10:57:24.385358 4838 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740" exitCode=0
Feb 02 10:57:24 crc kubenswrapper[4838]: I0202 10:57:24.385423 4838 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06" exitCode=0
Feb 02 10:57:24 crc kubenswrapper[4838]: I0202 10:57:24.385447 4838 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d" exitCode=0
Feb 02 10:57:24 crc kubenswrapper[4838]: I0202 10:57:24.385466 4838 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400" exitCode=2
Feb 02 10:57:24 crc kubenswrapper[4838]: I0202 10:57:24.385522 4838 scope.go:117] "RemoveContainer" containerID="7025d96a47690ca88f5f64caec19b45035e9c2df05e14a80b0e25a56950996b0"
Feb 02 10:57:24 crc kubenswrapper[4838]: E0202 10:57:24.561277 4838 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.241:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" volumeName="registry-storage"
Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.399284 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.471793 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rbh6x"
Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.471861 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rbh6x"
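[Annotation] From here on, every "Failed to get status for pod ... connection refused" is the status manager failing to read pods back from the apiserver it just helped take down (api-int.crc.testing:6443). These are retriable transport errors, not pod state: the kubelet keeps its cached status and retries until the endpoint answers again. A hedged sketch of that classification (the hostname is the one from the log and will not resolve elsewhere; the code still runs and reports the error class):

    package main

    import (
        "errors"
        "fmt"
        "net"
        "syscall"
        "time"
    )

    func main() {
        conn, err := net.DialTimeout("tcp", "api-int.crc.testing:6443", time.Second)
        if err == nil {
            conn.Close()
            fmt.Println("apiserver reachable again; flush queued status updates")
            return
        }
        if errors.Is(err, syscall.ECONNREFUSED) {
            fmt.Println("apiserver down; keep cached status and retry:", err)
            return
        }
        fmt.Println("other dial error (DNS, timeout, ...):", err)
    }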
pod="openshift-marketplace/certified-operators-rbh6x" Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.605538 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rbh6x" Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.607010 4838 status_manager.go:851] "Failed to get status for pod" podUID="2a16c769-012d-4a30-b9db-5629ed018ef8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.607356 4838 status_manager.go:851] "Failed to get status for pod" podUID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" pod="openshift-marketplace/certified-operators-rbh6x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rbh6x\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.869985 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.870912 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.871436 4838 status_manager.go:851] "Failed to get status for pod" podUID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" pod="openshift-marketplace/certified-operators-rbh6x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rbh6x\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.871654 4838 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.871874 4838 status_manager.go:851] "Failed to get status for pod" podUID="2a16c769-012d-4a30-b9db-5629ed018ef8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.871893 4838 util.go:48] "No ready sandbox for pod can be found. 
Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.872411 4838 status_manager.go:851] "Failed to get status for pod" podUID="2a16c769-012d-4a30-b9db-5629ed018ef8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.241:6443: connect: connection refused"
Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.872862 4838 status_manager.go:851] "Failed to get status for pod" podUID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" pod="openshift-marketplace/certified-operators-rbh6x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rbh6x\": dial tcp 38.102.83.241:6443: connect: connection refused"
Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.873606 4838 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.241:6443: connect: connection refused"
Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.883743 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.883822 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2a16c769-012d-4a30-b9db-5629ed018ef8-kube-api-access\") pod \"2a16c769-012d-4a30-b9db-5629ed018ef8\" (UID: \"2a16c769-012d-4a30-b9db-5629ed018ef8\") "
Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.883871 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.883995 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/2a16c769-012d-4a30-b9db-5629ed018ef8-var-lock\") pod \"2a16c769-012d-4a30-b9db-5629ed018ef8\" (UID: \"2a16c769-012d-4a30-b9db-5629ed018ef8\") "
Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.883996 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.884074 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.884076 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.884122 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.884122 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2a16c769-012d-4a30-b9db-5629ed018ef8-var-lock" (OuterVolumeSpecName: "var-lock") pod "2a16c769-012d-4a30-b9db-5629ed018ef8" (UID: "2a16c769-012d-4a30-b9db-5629ed018ef8"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.884190 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2a16c769-012d-4a30-b9db-5629ed018ef8-kubelet-dir\") pod \"2a16c769-012d-4a30-b9db-5629ed018ef8\" (UID: \"2a16c769-012d-4a30-b9db-5629ed018ef8\") " Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.884260 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2a16c769-012d-4a30-b9db-5629ed018ef8-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "2a16c769-012d-4a30-b9db-5629ed018ef8" (UID: "2a16c769-012d-4a30-b9db-5629ed018ef8"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.884738 4838 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.884763 4838 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.884778 4838 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/2a16c769-012d-4a30-b9db-5629ed018ef8-var-lock\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.884789 4838 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.884800 4838 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2a16c769-012d-4a30-b9db-5629ed018ef8-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.891301 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a16c769-012d-4a30-b9db-5629ed018ef8-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "2a16c769-012d-4a30-b9db-5629ed018ef8" (UID: "2a16c769-012d-4a30-b9db-5629ed018ef8"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:57:25 crc kubenswrapper[4838]: I0202 10:57:25.986814 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2a16c769-012d-4a30-b9db-5629ed018ef8-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.366518 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tnscg" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.367165 4838 status_manager.go:851] "Failed to get status for pod" podUID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" pod="openshift-marketplace/redhat-marketplace-tnscg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tnscg\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.367716 4838 status_manager.go:851] "Failed to get status for pod" podUID="2a16c769-012d-4a30-b9db-5629ed018ef8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.368156 4838 status_manager.go:851] "Failed to get status for pod" podUID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" pod="openshift-marketplace/certified-operators-rbh6x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rbh6x\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.368879 4838 status_manager.go:851] "Failed to get status for pod" 
podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.406971 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"2a16c769-012d-4a30-b9db-5629ed018ef8","Type":"ContainerDied","Data":"517725d0e05c24deecf3233357cc05eb6511e22587a9417728dceec73851bacf"} Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.407013 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="517725d0e05c24deecf3233357cc05eb6511e22587a9417728dceec73851bacf" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.407077 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.410331 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.410960 4838 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a" exitCode=0 Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.411805 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.411830 4838 scope.go:117] "RemoveContainer" containerID="ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.428134 4838 status_manager.go:851] "Failed to get status for pod" podUID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" pod="openshift-marketplace/certified-operators-rbh6x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rbh6x\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.428508 4838 scope.go:117] "RemoveContainer" containerID="1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.428742 4838 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.429079 4838 status_manager.go:851] "Failed to get status for pod" podUID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" pod="openshift-marketplace/redhat-marketplace-tnscg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tnscg\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.429359 4838 status_manager.go:851] "Failed to get status for pod" podUID="2a16c769-012d-4a30-b9db-5629ed018ef8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 
Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.429577 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tnscg"
Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.429794 4838 status_manager.go:851] "Failed to get status for pod" podUID="2a16c769-012d-4a30-b9db-5629ed018ef8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.241:6443: connect: connection refused"
Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.430123 4838 status_manager.go:851] "Failed to get status for pod" podUID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" pod="openshift-marketplace/certified-operators-rbh6x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rbh6x\": dial tcp 38.102.83.241:6443: connect: connection refused"
Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.431500 4838 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.241:6443: connect: connection refused"
Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.431867 4838 status_manager.go:851] "Failed to get status for pod" podUID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" pod="openshift-marketplace/redhat-marketplace-tnscg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tnscg\": dial tcp 38.102.83.241:6443: connect: connection refused"
Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.432350 4838 status_manager.go:851] "Failed to get status for pod" podUID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" pod="openshift-marketplace/certified-operators-rbh6x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rbh6x\": dial tcp 38.102.83.241:6443: connect: connection refused"
Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.432767 4838 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.241:6443: connect: connection refused"
Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.433041 4838 status_manager.go:851] "Failed to get status for pod" podUID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" pod="openshift-marketplace/redhat-marketplace-tnscg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tnscg\": dial tcp 38.102.83.241:6443: connect: connection refused"
Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.433274 4838 status_manager.go:851] "Failed to get status for pod" podUID="2a16c769-012d-4a30-b9db-5629ed018ef8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.241:6443: connect: connection refused"
Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.448892 4838 scope.go:117] "RemoveContainer" containerID="8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d"
Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.449824 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rbh6x"
Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.450390 4838 status_manager.go:851] "Failed to get status for pod" podUID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" pod="openshift-marketplace/certified-operators-rbh6x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rbh6x\": dial tcp 38.102.83.241:6443: connect: connection refused"
Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.450840 4838 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.241:6443: connect: connection refused"
Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.451294 4838 status_manager.go:851] "Failed to get status for pod" podUID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" pod="openshift-marketplace/redhat-marketplace-tnscg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tnscg\": dial tcp 38.102.83.241:6443: connect: connection refused"
Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.451829 4838 status_manager.go:851] "Failed to get status for pod" podUID="2a16c769-012d-4a30-b9db-5629ed018ef8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.241:6443: connect: connection refused"
Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.461107 4838 scope.go:117] "RemoveContainer" containerID="0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400"
Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.473495 4838 scope.go:117] "RemoveContainer" containerID="a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a"
Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.486760 4838 scope.go:117] "RemoveContainer" containerID="7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97"
Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.502473 4838 scope.go:117] "RemoveContainer" containerID="ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740"
Feb 02 10:57:26 crc kubenswrapper[4838]: E0202 10:57:26.502963 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\": container with ID starting with ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740 not found: ID does not exist" containerID="ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740"
Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.503004 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740"} err="failed to get container status \"ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\": rpc error: code = NotFound desc = could not find container \"ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740\": container with ID starting with ee3d69dc0b348603e7c1af48a7548e7f7af0f1e44eabe67c9d40b3c952a9e740 not found: ID does not exist"
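[Annotation] The NotFound errors above and below are the second leg of a double delete: the containers were already removed along with the pod, so when the garbage-collecting RemoveContainer asks the runtime for their status, cri-o answers NotFound. Treating NotFound as "already in the desired end state" makes deletion idempotent. A toy Go sketch of that pattern:

    package main

    import (
        "errors"
        "fmt"
    )

    // errNotFound stands in for the CRI "NotFound ... ID does not exist" status.
    var errNotFound = errors.New("NotFound: ID does not exist")

    func removeContainer(id string, present map[string]bool) error {
        if !present[id] {
            return fmt.Errorf("could not find container %q: %w", id, errNotFound)
        }
        delete(present, id)
        return nil
    }

    func main() {
        running := map[string]bool{} // already empty: first delete won
        if err := removeContainer("ee3d69dc0b34...", running); errors.Is(err, errNotFound) {
            // Already gone, so the deletion goal is met; log and ignore.
            fmt.Println("DeleteContainer returned error (ignored):", err)
        }
    }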
kubenswrapper[4838]: I0202 10:57:26.503028 4838 scope.go:117] "RemoveContainer" containerID="1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06" Feb 02 10:57:26 crc kubenswrapper[4838]: E0202 10:57:26.503478 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\": container with ID starting with 1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06 not found: ID does not exist" containerID="1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.503507 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06"} err="failed to get container status \"1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\": rpc error: code = NotFound desc = could not find container \"1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06\": container with ID starting with 1a8dfd577a672ebd43ba1595f2fb8a01ada6f9c222080a98bba6b477a6af2f06 not found: ID does not exist" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.503523 4838 scope.go:117] "RemoveContainer" containerID="8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d" Feb 02 10:57:26 crc kubenswrapper[4838]: E0202 10:57:26.503865 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\": container with ID starting with 8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d not found: ID does not exist" containerID="8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.503916 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d"} err="failed to get container status \"8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\": rpc error: code = NotFound desc = could not find container \"8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d\": container with ID starting with 8e68361c896ee6a4d737b4f4819935e516c9dd5efcc029b22172318d8c7b187d not found: ID does not exist" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.503933 4838 scope.go:117] "RemoveContainer" containerID="0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400" Feb 02 10:57:26 crc kubenswrapper[4838]: E0202 10:57:26.504726 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\": container with ID starting with 0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400 not found: ID does not exist" containerID="0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.504747 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400"} err="failed to get container status \"0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\": rpc error: code = NotFound desc = could not find container 
\"0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400\": container with ID starting with 0e03b6e2b66ad541c614614a18d8867192218f31c9bcc2d5fa17d28930ef6400 not found: ID does not exist" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.504780 4838 scope.go:117] "RemoveContainer" containerID="a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a" Feb 02 10:57:26 crc kubenswrapper[4838]: E0202 10:57:26.506094 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\": container with ID starting with a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a not found: ID does not exist" containerID="a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.506131 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a"} err="failed to get container status \"a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\": rpc error: code = NotFound desc = could not find container \"a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a\": container with ID starting with a79c927302680f1e9a59de498edac7f3209e7442219144907f968132a6e31d0a not found: ID does not exist" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.506151 4838 scope.go:117] "RemoveContainer" containerID="7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97" Feb 02 10:57:26 crc kubenswrapper[4838]: E0202 10:57:26.506496 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\": container with ID starting with 7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97 not found: ID does not exist" containerID="7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.506542 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97"} err="failed to get container status \"7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\": rpc error: code = NotFound desc = could not find container \"7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97\": container with ID starting with 7ebe033c32f08bb5f6a682338e9b4a0d21fd2e2ca505b043fc77c9318ffbfc97 not found: ID does not exist" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.508558 4838 status_manager.go:851] "Failed to get status for pod" podUID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" pod="openshift-marketplace/certified-operators-rbh6x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rbh6x\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.508774 4838 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.508947 4838 status_manager.go:851] "Failed 
to get status for pod" podUID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" pod="openshift-marketplace/redhat-marketplace-tnscg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tnscg\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.509175 4838 status_manager.go:851] "Failed to get status for pod" podUID="2a16c769-012d-4a30-b9db-5629ed018ef8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.515031 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Feb 02 10:57:26 crc kubenswrapper[4838]: E0202 10:57:26.703169 4838 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:26 crc kubenswrapper[4838]: E0202 10:57:26.704021 4838 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:26 crc kubenswrapper[4838]: E0202 10:57:26.704307 4838 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:26 crc kubenswrapper[4838]: E0202 10:57:26.704678 4838 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:26 crc kubenswrapper[4838]: E0202 10:57:26.705261 4838 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:26 crc kubenswrapper[4838]: I0202 10:57:26.705333 4838 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Feb 02 10:57:26 crc kubenswrapper[4838]: E0202 10:57:26.705570 4838 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.241:6443: connect: connection refused" interval="200ms" Feb 02 10:57:26 crc kubenswrapper[4838]: E0202 10:57:26.906725 4838 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.241:6443: connect: connection refused" interval="400ms" Feb 02 10:57:27 crc kubenswrapper[4838]: E0202 10:57:27.308847 4838 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.241:6443: connect: 
connection refused" interval="800ms" Feb 02 10:57:28 crc kubenswrapper[4838]: E0202 10:57:28.110369 4838 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.241:6443: connect: connection refused" interval="1.6s" Feb 02 10:57:28 crc kubenswrapper[4838]: I0202 10:57:28.163604 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qwkhj" Feb 02 10:57:28 crc kubenswrapper[4838]: I0202 10:57:28.164406 4838 status_manager.go:851] "Failed to get status for pod" podUID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" pod="openshift-marketplace/redhat-marketplace-tnscg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tnscg\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:28 crc kubenswrapper[4838]: I0202 10:57:28.164912 4838 status_manager.go:851] "Failed to get status for pod" podUID="f98a7f3b-5730-4469-aef3-188a9755f566" pod="openshift-marketplace/redhat-operators-qwkhj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qwkhj\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:28 crc kubenswrapper[4838]: I0202 10:57:28.166246 4838 status_manager.go:851] "Failed to get status for pod" podUID="2a16c769-012d-4a30-b9db-5629ed018ef8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:28 crc kubenswrapper[4838]: I0202 10:57:28.166719 4838 status_manager.go:851] "Failed to get status for pod" podUID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" pod="openshift-marketplace/certified-operators-rbh6x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rbh6x\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:28 crc kubenswrapper[4838]: I0202 10:57:28.228734 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qwkhj" Feb 02 10:57:28 crc kubenswrapper[4838]: I0202 10:57:28.229469 4838 status_manager.go:851] "Failed to get status for pod" podUID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" pod="openshift-marketplace/certified-operators-rbh6x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rbh6x\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:28 crc kubenswrapper[4838]: I0202 10:57:28.229999 4838 status_manager.go:851] "Failed to get status for pod" podUID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" pod="openshift-marketplace/redhat-marketplace-tnscg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tnscg\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:28 crc kubenswrapper[4838]: I0202 10:57:28.230572 4838 status_manager.go:851] "Failed to get status for pod" podUID="f98a7f3b-5730-4469-aef3-188a9755f566" pod="openshift-marketplace/redhat-operators-qwkhj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qwkhj\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:28 crc kubenswrapper[4838]: I0202 
10:57:28.230969 4838 status_manager.go:851] "Failed to get status for pod" podUID="2a16c769-012d-4a30-b9db-5629ed018ef8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:28 crc kubenswrapper[4838]: E0202 10:57:28.516836 4838 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.241:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 10:57:28 crc kubenswrapper[4838]: I0202 10:57:28.517467 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 10:57:28 crc kubenswrapper[4838]: W0202 10:57:28.550783 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-cc2c2081ea80d881ab4686fa05d55c46aa33ed3dc372f6bf7850483d5cd93cd7 WatchSource:0}: Error finding container cc2c2081ea80d881ab4686fa05d55c46aa33ed3dc372f6bf7850483d5cd93cd7: Status 404 returned error can't find the container with id cc2c2081ea80d881ab4686fa05d55c46aa33ed3dc372f6bf7850483d5cd93cd7 Feb 02 10:57:28 crc kubenswrapper[4838]: E0202 10:57:28.554518 4838 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.241:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.189068c4ece6561a openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-02 10:57:28.554001946 +0000 UTC m=+242.891103014,LastTimestamp:2026-02-02 10:57:28.554001946 +0000 UTC m=+242.891103014,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 02 10:57:29 crc kubenswrapper[4838]: I0202 10:57:29.430112 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"425211004a2eabea8507c01fca1d058f6e41bf730d042651c9cfbde4958b31bc"} Feb 02 10:57:29 crc kubenswrapper[4838]: I0202 10:57:29.430462 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"cc2c2081ea80d881ab4686fa05d55c46aa33ed3dc372f6bf7850483d5cd93cd7"} Feb 02 10:57:29 crc kubenswrapper[4838]: E0202 10:57:29.711022 4838 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial 
tcp 38.102.83.241:6443: connect: connection refused" interval="3.2s" Feb 02 10:57:30 crc kubenswrapper[4838]: E0202 10:57:30.436827 4838 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.241:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 10:57:30 crc kubenswrapper[4838]: I0202 10:57:30.437173 4838 status_manager.go:851] "Failed to get status for pod" podUID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" pod="openshift-marketplace/certified-operators-rbh6x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rbh6x\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:30 crc kubenswrapper[4838]: I0202 10:57:30.437941 4838 status_manager.go:851] "Failed to get status for pod" podUID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" pod="openshift-marketplace/redhat-marketplace-tnscg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tnscg\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:30 crc kubenswrapper[4838]: I0202 10:57:30.438338 4838 status_manager.go:851] "Failed to get status for pod" podUID="f98a7f3b-5730-4469-aef3-188a9755f566" pod="openshift-marketplace/redhat-operators-qwkhj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qwkhj\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:30 crc kubenswrapper[4838]: I0202 10:57:30.438903 4838 status_manager.go:851] "Failed to get status for pod" podUID="2a16c769-012d-4a30-b9db-5629ed018ef8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:32 crc kubenswrapper[4838]: E0202 10:57:32.912270 4838 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.241:6443: connect: connection refused" interval="6.4s" Feb 02 10:57:34 crc kubenswrapper[4838]: E0202 10:57:34.074712 4838 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.241:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.189068c4ece6561a openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-02 10:57:28.554001946 +0000 UTC m=+242.891103014,LastTimestamp:2026-02-02 10:57:28.554001946 +0000 UTC m=+242.891103014,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 02 10:57:36 crc 
kubenswrapper[4838]: I0202 10:57:36.510785 4838 status_manager.go:851] "Failed to get status for pod" podUID="2a16c769-012d-4a30-b9db-5629ed018ef8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:36 crc kubenswrapper[4838]: I0202 10:57:36.511497 4838 status_manager.go:851] "Failed to get status for pod" podUID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" pod="openshift-marketplace/certified-operators-rbh6x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rbh6x\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:36 crc kubenswrapper[4838]: I0202 10:57:36.512013 4838 status_manager.go:851] "Failed to get status for pod" podUID="f98a7f3b-5730-4469-aef3-188a9755f566" pod="openshift-marketplace/redhat-operators-qwkhj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qwkhj\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:36 crc kubenswrapper[4838]: I0202 10:57:36.512442 4838 status_manager.go:851] "Failed to get status for pod" podUID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" pod="openshift-marketplace/redhat-marketplace-tnscg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tnscg\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:37 crc kubenswrapper[4838]: I0202 10:57:37.504953 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:57:37 crc kubenswrapper[4838]: I0202 10:57:37.507460 4838 status_manager.go:851] "Failed to get status for pod" podUID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" pod="openshift-marketplace/certified-operators-rbh6x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rbh6x\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:37 crc kubenswrapper[4838]: I0202 10:57:37.508759 4838 status_manager.go:851] "Failed to get status for pod" podUID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" pod="openshift-marketplace/redhat-marketplace-tnscg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tnscg\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:37 crc kubenswrapper[4838]: I0202 10:57:37.509363 4838 status_manager.go:851] "Failed to get status for pod" podUID="f98a7f3b-5730-4469-aef3-188a9755f566" pod="openshift-marketplace/redhat-operators-qwkhj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qwkhj\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:37 crc kubenswrapper[4838]: I0202 10:57:37.510063 4838 status_manager.go:851] "Failed to get status for pod" podUID="2a16c769-012d-4a30-b9db-5629ed018ef8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:37 crc kubenswrapper[4838]: I0202 10:57:37.535352 4838 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cfe56663-b047-48b0-864b-53bd2a18f1be" Feb 02 10:57:37 crc kubenswrapper[4838]: 
I0202 10:57:37.535403 4838 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cfe56663-b047-48b0-864b-53bd2a18f1be" Feb 02 10:57:37 crc kubenswrapper[4838]: E0202 10:57:37.536044 4838 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.241:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:57:37 crc kubenswrapper[4838]: I0202 10:57:37.536867 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:57:37 crc kubenswrapper[4838]: W0202 10:57:37.569506 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-dbea1cc05b3baf9ebba01d9ae298625a1523d8e28dbd569b573740e16043070c WatchSource:0}: Error finding container dbea1cc05b3baf9ebba01d9ae298625a1523d8e28dbd569b573740e16043070c: Status 404 returned error can't find the container with id dbea1cc05b3baf9ebba01d9ae298625a1523d8e28dbd569b573740e16043070c Feb 02 10:57:38 crc kubenswrapper[4838]: I0202 10:57:38.319389 4838 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Readiness probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Feb 02 10:57:38 crc kubenswrapper[4838]: I0202 10:57:38.320015 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Feb 02 10:57:38 crc kubenswrapper[4838]: I0202 10:57:38.501434 4838 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="d6158a4cbb89a3c7efe1960ee62e2b95432f04f40c0476a6393b5da643d86ca1" exitCode=0 Feb 02 10:57:38 crc kubenswrapper[4838]: I0202 10:57:38.501605 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"d6158a4cbb89a3c7efe1960ee62e2b95432f04f40c0476a6393b5da643d86ca1"} Feb 02 10:57:38 crc kubenswrapper[4838]: I0202 10:57:38.501742 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"dbea1cc05b3baf9ebba01d9ae298625a1523d8e28dbd569b573740e16043070c"} Feb 02 10:57:38 crc kubenswrapper[4838]: I0202 10:57:38.502437 4838 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cfe56663-b047-48b0-864b-53bd2a18f1be" Feb 02 10:57:38 crc kubenswrapper[4838]: I0202 10:57:38.502493 4838 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cfe56663-b047-48b0-864b-53bd2a18f1be" Feb 02 10:57:38 crc kubenswrapper[4838]: I0202 10:57:38.502998 4838 status_manager.go:851] "Failed to get status for pod" podUID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" pod="openshift-marketplace/redhat-marketplace-tnscg" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tnscg\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:38 crc kubenswrapper[4838]: E0202 10:57:38.503267 4838 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.241:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:57:38 crc kubenswrapper[4838]: I0202 10:57:38.503494 4838 status_manager.go:851] "Failed to get status for pod" podUID="f98a7f3b-5730-4469-aef3-188a9755f566" pod="openshift-marketplace/redhat-operators-qwkhj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qwkhj\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:38 crc kubenswrapper[4838]: I0202 10:57:38.503808 4838 status_manager.go:851] "Failed to get status for pod" podUID="2a16c769-012d-4a30-b9db-5629ed018ef8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:38 crc kubenswrapper[4838]: I0202 10:57:38.504336 4838 status_manager.go:851] "Failed to get status for pod" podUID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" pod="openshift-marketplace/certified-operators-rbh6x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rbh6x\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:38 crc kubenswrapper[4838]: I0202 10:57:38.506543 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Feb 02 10:57:38 crc kubenswrapper[4838]: I0202 10:57:38.506581 4838 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590" exitCode=1 Feb 02 10:57:38 crc kubenswrapper[4838]: I0202 10:57:38.517409 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590"} Feb 02 10:57:38 crc kubenswrapper[4838]: I0202 10:57:38.518060 4838 scope.go:117] "RemoveContainer" containerID="d3fe4450c6cfaeb8d8894fb6eec7aeb4f1c4f2d1d20c5838e6d69303b11f3590" Feb 02 10:57:38 crc kubenswrapper[4838]: I0202 10:57:38.518273 4838 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:38 crc kubenswrapper[4838]: I0202 10:57:38.519118 4838 status_manager.go:851] "Failed to get status for pod" podUID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" pod="openshift-marketplace/certified-operators-rbh6x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rbh6x\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:38 crc 
kubenswrapper[4838]: I0202 10:57:38.519741 4838 status_manager.go:851] "Failed to get status for pod" podUID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" pod="openshift-marketplace/redhat-marketplace-tnscg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tnscg\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:38 crc kubenswrapper[4838]: I0202 10:57:38.520198 4838 status_manager.go:851] "Failed to get status for pod" podUID="f98a7f3b-5730-4469-aef3-188a9755f566" pod="openshift-marketplace/redhat-operators-qwkhj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-qwkhj\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:38 crc kubenswrapper[4838]: I0202 10:57:38.520794 4838 status_manager.go:851] "Failed to get status for pod" podUID="2a16c769-012d-4a30-b9db-5629ed018ef8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.241:6443: connect: connection refused" Feb 02 10:57:39 crc kubenswrapper[4838]: I0202 10:57:39.514813 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Feb 02 10:57:39 crc kubenswrapper[4838]: I0202 10:57:39.515231 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"61d879d1098d17b900614e3a8375d769eca8474088e014b352dc3dd79474fc1a"} Feb 02 10:57:39 crc kubenswrapper[4838]: I0202 10:57:39.518470 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"de27f399367652178ea05e3e7a7cfb202edfed1c6b0027a21bec6b2fbee16b94"} Feb 02 10:57:39 crc kubenswrapper[4838]: I0202 10:57:39.518761 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"6a27ab9f3b4673db45d7d43fb69c2298429a7280d1b29238229f62ee04250c35"} Feb 02 10:57:39 crc kubenswrapper[4838]: I0202 10:57:39.518772 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"c77341b33e4291717c4ae8558aa7a6f6ae9136bbbd6f201ad20e4a7bfaf426f8"} Feb 02 10:57:40 crc kubenswrapper[4838]: I0202 10:57:40.530938 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"2ab4076db835c40b599fc48dbf40495da04853574f4432a13bb6f3aac7c454ed"} Feb 02 10:57:41 crc kubenswrapper[4838]: I0202 10:57:41.541809 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"c1c61831c2492f4d318f21555bead70ab748c835af05d0d5b1a541261ddc9449"} Feb 02 10:57:41 crc kubenswrapper[4838]: I0202 10:57:41.542181 4838 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cfe56663-b047-48b0-864b-53bd2a18f1be" Feb 02 10:57:41 crc 
kubenswrapper[4838]: I0202 10:57:41.542206 4838 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cfe56663-b047-48b0-864b-53bd2a18f1be" Feb 02 10:57:41 crc kubenswrapper[4838]: I0202 10:57:41.542471 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:57:42 crc kubenswrapper[4838]: I0202 10:57:42.537493 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:57:42 crc kubenswrapper[4838]: I0202 10:57:42.537873 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:57:42 crc kubenswrapper[4838]: I0202 10:57:42.544412 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:57:43 crc kubenswrapper[4838]: I0202 10:57:43.438744 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" podUID="5a08fba0-5dfa-4dc5-8cf9-aa4580244e37" containerName="oauth-openshift" containerID="cri-o://cc6694aa46b51203a530a155da47ab5bf4aabd583443aabe9c2a3d08fad7a04f" gracePeriod=15 Feb 02 10:57:43 crc kubenswrapper[4838]: I0202 10:57:43.961304 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.089859 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-user-template-login\") pod \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.089979 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-user-template-provider-selection\") pod \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.090029 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-trusted-ca-bundle\") pod \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.090074 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r6p62\" (UniqueName: \"kubernetes.io/projected/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-kube-api-access-r6p62\") pod \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.090158 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-cliconfig\") pod \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.090213 4838 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-service-ca\") pod \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.090253 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-audit-dir\") pod \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.090294 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-user-idp-0-file-data\") pod \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.090339 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-ocp-branding-template\") pod \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.090435 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-audit-policies\") pod \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.090489 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-router-certs\") pod \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.090525 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-user-template-error\") pod \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.090559 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-serving-cert\") pod \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.090598 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-session\") pod \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\" (UID: \"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37\") " Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.090636 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-audit-dir" 
(OuterVolumeSpecName: "audit-dir") pod "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37" (UID: "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.090964 4838 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-audit-dir\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.092078 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37" (UID: "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.092311 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37" (UID: "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.093026 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37" (UID: "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.093277 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37" (UID: "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.101472 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37" (UID: "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.102989 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-kube-api-access-r6p62" (OuterVolumeSpecName: "kube-api-access-r6p62") pod "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37" (UID: "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37"). InnerVolumeSpecName "kube-api-access-r6p62". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.103471 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37" (UID: "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.104008 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37" (UID: "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.104997 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37" (UID: "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.106183 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37" (UID: "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.107584 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37" (UID: "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.115174 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37" (UID: "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.115663 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37" (UID: "5a08fba0-5dfa-4dc5-8cf9-aa4580244e37"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.192419 4838 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.192449 4838 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.192461 4838 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.192473 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r6p62\" (UniqueName: \"kubernetes.io/projected/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-kube-api-access-r6p62\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.192483 4838 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.192493 4838 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.192501 4838 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.192510 4838 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.192519 4838 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-audit-policies\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.192527 4838 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.192537 4838 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.192546 4838 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.192555 4838 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.572512 4838 generic.go:334] "Generic (PLEG): container finished" podID="5a08fba0-5dfa-4dc5-8cf9-aa4580244e37" containerID="cc6694aa46b51203a530a155da47ab5bf4aabd583443aabe9c2a3d08fad7a04f" exitCode=0 Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.572572 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" event={"ID":"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37","Type":"ContainerDied","Data":"cc6694aa46b51203a530a155da47ab5bf4aabd583443aabe9c2a3d08fad7a04f"} Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.572634 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" event={"ID":"5a08fba0-5dfa-4dc5-8cf9-aa4580244e37","Type":"ContainerDied","Data":"489befa8799461aa0ce285403b791a2f9ec4b0d1f12d04349d14b9bd047b578f"} Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.572646 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-2l7rj" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.572667 4838 scope.go:117] "RemoveContainer" containerID="cc6694aa46b51203a530a155da47ab5bf4aabd583443aabe9c2a3d08fad7a04f" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.596680 4838 scope.go:117] "RemoveContainer" containerID="cc6694aa46b51203a530a155da47ab5bf4aabd583443aabe9c2a3d08fad7a04f" Feb 02 10:57:44 crc kubenswrapper[4838]: E0202 10:57:44.597230 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc6694aa46b51203a530a155da47ab5bf4aabd583443aabe9c2a3d08fad7a04f\": container with ID starting with cc6694aa46b51203a530a155da47ab5bf4aabd583443aabe9c2a3d08fad7a04f not found: ID does not exist" containerID="cc6694aa46b51203a530a155da47ab5bf4aabd583443aabe9c2a3d08fad7a04f" Feb 02 10:57:44 crc kubenswrapper[4838]: I0202 10:57:44.597283 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc6694aa46b51203a530a155da47ab5bf4aabd583443aabe9c2a3d08fad7a04f"} err="failed to get container status \"cc6694aa46b51203a530a155da47ab5bf4aabd583443aabe9c2a3d08fad7a04f\": rpc error: code = NotFound desc = could not find container \"cc6694aa46b51203a530a155da47ab5bf4aabd583443aabe9c2a3d08fad7a04f\": container with ID starting with cc6694aa46b51203a530a155da47ab5bf4aabd583443aabe9c2a3d08fad7a04f not found: ID does not exist" Feb 02 10:57:45 crc kubenswrapper[4838]: E0202 10:57:45.786206 4838 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"audit\": Failed to watch *v1.ConfigMap: unknown (get configmaps)" logger="UnhandledError" Feb 02 10:57:45 crc kubenswrapper[4838]: E0202 10:57:45.993543 4838 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"oauth-openshift-dockercfg-znhcc\": Failed to watch *v1.Secret: unknown (get secrets)" logger="UnhandledError" Feb 02 10:57:46 crc kubenswrapper[4838]: E0202 10:57:46.309558 4838 reflector.go:158] "Unhandled Error" 
err="object-\"openshift-authentication\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: unknown (get configmaps)" logger="UnhandledError" Feb 02 10:57:46 crc kubenswrapper[4838]: I0202 10:57:46.557335 4838 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:57:46 crc kubenswrapper[4838]: I0202 10:57:46.594310 4838 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cfe56663-b047-48b0-864b-53bd2a18f1be" Feb 02 10:57:46 crc kubenswrapper[4838]: I0202 10:57:46.594343 4838 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cfe56663-b047-48b0-864b-53bd2a18f1be" Feb 02 10:57:46 crc kubenswrapper[4838]: I0202 10:57:46.601821 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:57:46 crc kubenswrapper[4838]: I0202 10:57:46.605821 4838 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="caf8c0ca-0648-4597-a4fd-d23a48f61321" Feb 02 10:57:47 crc kubenswrapper[4838]: I0202 10:57:47.196084 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 10:57:47 crc kubenswrapper[4838]: I0202 10:57:47.200656 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 10:57:47 crc kubenswrapper[4838]: I0202 10:57:47.600284 4838 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cfe56663-b047-48b0-864b-53bd2a18f1be" Feb 02 10:57:47 crc kubenswrapper[4838]: I0202 10:57:47.600320 4838 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cfe56663-b047-48b0-864b-53bd2a18f1be" Feb 02 10:57:47 crc kubenswrapper[4838]: I0202 10:57:47.600531 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 10:57:48 crc kubenswrapper[4838]: I0202 10:57:48.335132 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 10:57:55 crc kubenswrapper[4838]: I0202 10:57:55.370769 4838 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Feb 02 10:57:55 crc kubenswrapper[4838]: I0202 10:57:55.378725 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-authentication/oauth-openshift-558db77b4-2l7rj"] Feb 02 10:57:55 crc kubenswrapper[4838]: I0202 10:57:55.378831 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 02 10:57:55 crc kubenswrapper[4838]: I0202 10:57:55.385742 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 10:57:55 crc kubenswrapper[4838]: I0202 10:57:55.414386 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=9.414360183 podStartE2EDuration="9.414360183s" podCreationTimestamp="2026-02-02 10:57:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:57:55.407810399 +0000 UTC m=+269.744911477" watchObservedRunningTime="2026-02-02 10:57:55.414360183 +0000 UTC m=+269.751461241" Feb 02 10:57:55 crc kubenswrapper[4838]: I0202 10:57:55.798600 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Feb 02 10:57:56 crc kubenswrapper[4838]: I0202 10:57:56.009431 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Feb 02 10:57:56 crc kubenswrapper[4838]: I0202 10:57:56.323469 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Feb 02 10:57:56 crc kubenswrapper[4838]: I0202 10:57:56.420656 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Feb 02 10:57:56 crc kubenswrapper[4838]: I0202 10:57:56.471270 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Feb 02 10:57:56 crc kubenswrapper[4838]: I0202 10:57:56.516544 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a08fba0-5dfa-4dc5-8cf9-aa4580244e37" path="/var/lib/kubelet/pods/5a08fba0-5dfa-4dc5-8cf9-aa4580244e37/volumes" Feb 02 10:57:56 crc kubenswrapper[4838]: I0202 10:57:56.612691 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Feb 02 10:57:56 crc kubenswrapper[4838]: I0202 10:57:56.777044 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Feb 02 10:57:56 crc kubenswrapper[4838]: I0202 10:57:56.989512 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Feb 02 10:57:57 crc kubenswrapper[4838]: I0202 10:57:57.007000 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Feb 02 10:57:57 crc kubenswrapper[4838]: I0202 10:57:57.025592 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Feb 02 10:57:57 crc kubenswrapper[4838]: I0202 10:57:57.129518 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Feb 02 10:57:57 crc kubenswrapper[4838]: I0202 10:57:57.174102 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Feb 02 10:57:57 crc kubenswrapper[4838]: I0202 10:57:57.347773 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Feb 02 10:57:57 crc kubenswrapper[4838]: I0202 10:57:57.570331 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Feb 02 10:57:57 crc kubenswrapper[4838]: I0202 10:57:57.664790 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Feb 02 10:57:57 crc kubenswrapper[4838]: I0202 10:57:57.838714 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Feb 02 10:57:57 crc kubenswrapper[4838]: I0202 10:57:57.941249 4838 kubelet.go:2431] "SyncLoop REMOVE" 
source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 02 10:57:57 crc kubenswrapper[4838]: I0202 10:57:57.941700 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://425211004a2eabea8507c01fca1d058f6e41bf730d042651c9cfbde4958b31bc" gracePeriod=5 Feb 02 10:57:58 crc kubenswrapper[4838]: I0202 10:57:58.157702 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Feb 02 10:57:58 crc kubenswrapper[4838]: I0202 10:57:58.248490 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Feb 02 10:57:58 crc kubenswrapper[4838]: I0202 10:57:58.288024 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Feb 02 10:57:58 crc kubenswrapper[4838]: I0202 10:57:58.307652 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Feb 02 10:57:58 crc kubenswrapper[4838]: I0202 10:57:58.323698 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Feb 02 10:57:58 crc kubenswrapper[4838]: I0202 10:57:58.329447 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 02 10:57:58 crc kubenswrapper[4838]: I0202 10:57:58.559798 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Feb 02 10:57:58 crc kubenswrapper[4838]: I0202 10:57:58.594347 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Feb 02 10:57:58 crc kubenswrapper[4838]: I0202 10:57:58.615000 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 02 10:57:58 crc kubenswrapper[4838]: I0202 10:57:58.681828 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Feb 02 10:57:58 crc kubenswrapper[4838]: I0202 10:57:58.716677 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Feb 02 10:57:58 crc kubenswrapper[4838]: I0202 10:57:58.765421 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Feb 02 10:57:58 crc kubenswrapper[4838]: I0202 10:57:58.931984 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Feb 02 10:57:58 crc kubenswrapper[4838]: I0202 10:57:58.976984 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 02 10:57:59 crc kubenswrapper[4838]: I0202 10:57:59.064460 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Feb 02 10:57:59 crc kubenswrapper[4838]: I0202 10:57:59.070729 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Feb 02 10:57:59 crc 
kubenswrapper[4838]: I0202 10:57:59.170074 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Feb 02 10:57:59 crc kubenswrapper[4838]: I0202 10:57:59.196238 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Feb 02 10:57:59 crc kubenswrapper[4838]: I0202 10:57:59.220887 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Feb 02 10:57:59 crc kubenswrapper[4838]: I0202 10:57:59.306075 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Feb 02 10:57:59 crc kubenswrapper[4838]: I0202 10:57:59.517740 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Feb 02 10:57:59 crc kubenswrapper[4838]: I0202 10:57:59.606814 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Feb 02 10:57:59 crc kubenswrapper[4838]: I0202 10:57:59.619340 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Feb 02 10:57:59 crc kubenswrapper[4838]: I0202 10:57:59.645588 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Feb 02 10:57:59 crc kubenswrapper[4838]: I0202 10:57:59.664700 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Feb 02 10:57:59 crc kubenswrapper[4838]: I0202 10:57:59.682448 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Feb 02 10:57:59 crc kubenswrapper[4838]: I0202 10:57:59.683168 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Feb 02 10:57:59 crc kubenswrapper[4838]: I0202 10:57:59.773968 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Feb 02 10:57:59 crc kubenswrapper[4838]: I0202 10:57:59.930131 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Feb 02 10:57:59 crc kubenswrapper[4838]: I0202 10:57:59.998046 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Feb 02 10:58:00 crc kubenswrapper[4838]: I0202 10:58:00.103234 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Feb 02 10:58:00 crc kubenswrapper[4838]: I0202 10:58:00.185275 4838 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Feb 02 10:58:00 crc kubenswrapper[4838]: I0202 10:58:00.242695 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Feb 02 10:58:00 crc kubenswrapper[4838]: I0202 10:58:00.245298 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 02 10:58:00 crc kubenswrapper[4838]: I0202 10:58:00.331052 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Feb 02 10:58:00 crc 
kubenswrapper[4838]: I0202 10:58:00.382349 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Feb 02 10:58:00 crc kubenswrapper[4838]: I0202 10:58:00.400048 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Feb 02 10:58:00 crc kubenswrapper[4838]: I0202 10:58:00.407070 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 02 10:58:00 crc kubenswrapper[4838]: I0202 10:58:00.435632 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Feb 02 10:58:00 crc kubenswrapper[4838]: I0202 10:58:00.439941 4838 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Feb 02 10:58:00 crc kubenswrapper[4838]: I0202 10:58:00.475059 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Feb 02 10:58:00 crc kubenswrapper[4838]: I0202 10:58:00.535474 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Feb 02 10:58:00 crc kubenswrapper[4838]: I0202 10:58:00.604668 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Feb 02 10:58:00 crc kubenswrapper[4838]: I0202 10:58:00.612551 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Feb 02 10:58:00 crc kubenswrapper[4838]: I0202 10:58:00.657040 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Feb 02 10:58:00 crc kubenswrapper[4838]: I0202 10:58:00.665239 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Feb 02 10:58:00 crc kubenswrapper[4838]: I0202 10:58:00.679790 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Feb 02 10:58:00 crc kubenswrapper[4838]: I0202 10:58:00.778049 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Feb 02 10:58:00 crc kubenswrapper[4838]: I0202 10:58:00.820230 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Feb 02 10:58:00 crc kubenswrapper[4838]: I0202 10:58:00.987541 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Feb 02 10:58:01 crc kubenswrapper[4838]: I0202 10:58:01.014187 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Feb 02 10:58:01 crc kubenswrapper[4838]: I0202 10:58:01.091031 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Feb 02 10:58:01 crc kubenswrapper[4838]: I0202 10:58:01.097823 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 02 10:58:01 crc kubenswrapper[4838]: I0202 10:58:01.112866 4838 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-config-operator"/"kube-root-ca.crt" Feb 02 10:58:01 crc kubenswrapper[4838]: I0202 10:58:01.193185 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Feb 02 10:58:01 crc kubenswrapper[4838]: I0202 10:58:01.230362 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Feb 02 10:58:01 crc kubenswrapper[4838]: I0202 10:58:01.245401 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Feb 02 10:58:01 crc kubenswrapper[4838]: I0202 10:58:01.273756 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Feb 02 10:58:01 crc kubenswrapper[4838]: I0202 10:58:01.512276 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Feb 02 10:58:01 crc kubenswrapper[4838]: I0202 10:58:01.563973 4838 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Feb 02 10:58:01 crc kubenswrapper[4838]: I0202 10:58:01.568784 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Feb 02 10:58:01 crc kubenswrapper[4838]: I0202 10:58:01.626150 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Feb 02 10:58:01 crc kubenswrapper[4838]: I0202 10:58:01.707193 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Feb 02 10:58:01 crc kubenswrapper[4838]: I0202 10:58:01.725572 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Feb 02 10:58:01 crc kubenswrapper[4838]: I0202 10:58:01.773770 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Feb 02 10:58:01 crc kubenswrapper[4838]: I0202 10:58:01.805483 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Feb 02 10:58:01 crc kubenswrapper[4838]: I0202 10:58:01.807700 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Feb 02 10:58:01 crc kubenswrapper[4838]: I0202 10:58:01.859490 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Feb 02 10:58:01 crc kubenswrapper[4838]: I0202 10:58:01.895959 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Feb 02 10:58:02 crc kubenswrapper[4838]: I0202 10:58:02.033335 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Feb 02 10:58:02 crc kubenswrapper[4838]: I0202 10:58:02.184220 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Feb 02 10:58:02 crc kubenswrapper[4838]: I0202 10:58:02.194042 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Feb 02 10:58:02 crc kubenswrapper[4838]: I0202 10:58:02.197103 4838 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Feb 02 10:58:02 crc kubenswrapper[4838]: I0202 10:58:02.281153 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Feb 02 10:58:02 crc kubenswrapper[4838]: I0202 10:58:02.367002 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Feb 02 10:58:02 crc kubenswrapper[4838]: I0202 10:58:02.376899 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 02 10:58:02 crc kubenswrapper[4838]: I0202 10:58:02.398106 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Feb 02 10:58:02 crc kubenswrapper[4838]: I0202 10:58:02.541998 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Feb 02 10:58:02 crc kubenswrapper[4838]: I0202 10:58:02.585103 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Feb 02 10:58:02 crc kubenswrapper[4838]: I0202 10:58:02.589382 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Feb 02 10:58:02 crc kubenswrapper[4838]: I0202 10:58:02.608249 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Feb 02 10:58:02 crc kubenswrapper[4838]: I0202 10:58:02.662250 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Feb 02 10:58:02 crc kubenswrapper[4838]: I0202 10:58:02.680202 4838 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Feb 02 10:58:02 crc kubenswrapper[4838]: I0202 10:58:02.774257 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Feb 02 10:58:02 crc kubenswrapper[4838]: I0202 10:58:02.875450 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.188049 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.214182 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.235928 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.258598 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.296538 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.368591 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.437861 4838 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.448286 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.486442 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.547813 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.547922 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.593708 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.618861 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.664120 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.664565 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.664656 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.664705 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.664737 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.664749 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.664768 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.664834 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.664864 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.664910 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.665115 4838 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.665161 4838 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.665176 4838 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.665186 4838 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.676223 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.688736 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.725979 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.726036 4838 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="425211004a2eabea8507c01fca1d058f6e41bf730d042651c9cfbde4958b31bc" exitCode=137 Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.726086 4838 scope.go:117] "RemoveContainer" containerID="425211004a2eabea8507c01fca1d058f6e41bf730d042651c9cfbde4958b31bc" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.726227 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.758021 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.766316 4838 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.782248 4838 scope.go:117] "RemoveContainer" containerID="425211004a2eabea8507c01fca1d058f6e41bf730d042651c9cfbde4958b31bc" Feb 02 10:58:03 crc kubenswrapper[4838]: E0202 10:58:03.785493 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"425211004a2eabea8507c01fca1d058f6e41bf730d042651c9cfbde4958b31bc\": container with ID starting with 425211004a2eabea8507c01fca1d058f6e41bf730d042651c9cfbde4958b31bc not found: ID does not exist" containerID="425211004a2eabea8507c01fca1d058f6e41bf730d042651c9cfbde4958b31bc" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.785534 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"425211004a2eabea8507c01fca1d058f6e41bf730d042651c9cfbde4958b31bc"} err="failed to get container status \"425211004a2eabea8507c01fca1d058f6e41bf730d042651c9cfbde4958b31bc\": rpc error: code = NotFound desc = could not find container \"425211004a2eabea8507c01fca1d058f6e41bf730d042651c9cfbde4958b31bc\": container with ID starting with 425211004a2eabea8507c01fca1d058f6e41bf730d042651c9cfbde4958b31bc not found: ID does not exist" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.841578 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.891776 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.906811 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.970604 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Feb 02 10:58:03 crc 
kubenswrapper[4838]: I0202 10:58:03.985984 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Feb 02 10:58:03 crc kubenswrapper[4838]: I0202 10:58:03.995340 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 02 10:58:04 crc kubenswrapper[4838]: I0202 10:58:04.092805 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Feb 02 10:58:04 crc kubenswrapper[4838]: I0202 10:58:04.185889 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Feb 02 10:58:04 crc kubenswrapper[4838]: I0202 10:58:04.190344 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Feb 02 10:58:04 crc kubenswrapper[4838]: I0202 10:58:04.241873 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Feb 02 10:58:04 crc kubenswrapper[4838]: I0202 10:58:04.408491 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Feb 02 10:58:04 crc kubenswrapper[4838]: I0202 10:58:04.514086 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Feb 02 10:58:04 crc kubenswrapper[4838]: I0202 10:58:04.555131 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Feb 02 10:58:04 crc kubenswrapper[4838]: I0202 10:58:04.617736 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Feb 02 10:58:04 crc kubenswrapper[4838]: I0202 10:58:04.640517 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Feb 02 10:58:04 crc kubenswrapper[4838]: I0202 10:58:04.728762 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Feb 02 10:58:04 crc kubenswrapper[4838]: I0202 10:58:04.814894 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Feb 02 10:58:04 crc kubenswrapper[4838]: I0202 10:58:04.874691 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Feb 02 10:58:04 crc kubenswrapper[4838]: I0202 10:58:04.896580 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Feb 02 10:58:04 crc kubenswrapper[4838]: I0202 10:58:04.919055 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Feb 02 10:58:04 crc kubenswrapper[4838]: I0202 10:58:04.969205 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Feb 02 10:58:05 crc kubenswrapper[4838]: I0202 10:58:05.106174 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Feb 02 10:58:05 crc kubenswrapper[4838]: I0202 10:58:05.195944 4838 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Feb 02 10:58:05 crc kubenswrapper[4838]: I0202 10:58:05.208504 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Feb 02 10:58:05 crc kubenswrapper[4838]: I0202 10:58:05.231849 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Feb 02 10:58:05 crc kubenswrapper[4838]: I0202 10:58:05.353765 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Feb 02 10:58:05 crc kubenswrapper[4838]: I0202 10:58:05.368968 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Feb 02 10:58:05 crc kubenswrapper[4838]: I0202 10:58:05.434073 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Feb 02 10:58:05 crc kubenswrapper[4838]: I0202 10:58:05.493386 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Feb 02 10:58:05 crc kubenswrapper[4838]: I0202 10:58:05.643232 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Feb 02 10:58:05 crc kubenswrapper[4838]: I0202 10:58:05.654491 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Feb 02 10:58:05 crc kubenswrapper[4838]: I0202 10:58:05.847233 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Feb 02 10:58:05 crc kubenswrapper[4838]: I0202 10:58:05.851822 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 02 10:58:05 crc kubenswrapper[4838]: I0202 10:58:05.947771 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.006531 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.016477 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.125253 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.137448 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.178686 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.207798 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.280119 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.362325 4838 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-config-operator"/"mcc-proxy-tls" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.363881 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.385807 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.397639 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.401734 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.449352 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.523761 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.529921 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-67556b9b9b-bn4vr"] Feb 02 10:58:06 crc kubenswrapper[4838]: E0202 10:58:06.530186 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a08fba0-5dfa-4dc5-8cf9-aa4580244e37" containerName="oauth-openshift" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.530200 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a08fba0-5dfa-4dc5-8cf9-aa4580244e37" containerName="oauth-openshift" Feb 02 10:58:06 crc kubenswrapper[4838]: E0202 10:58:06.530218 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.530225 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Feb 02 10:58:06 crc kubenswrapper[4838]: E0202 10:58:06.530235 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a16c769-012d-4a30-b9db-5629ed018ef8" containerName="installer" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.530243 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a16c769-012d-4a30-b9db-5629ed018ef8" containerName="installer" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.530371 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.530383 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a16c769-012d-4a30-b9db-5629ed018ef8" containerName="installer" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.530395 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a08fba0-5dfa-4dc5-8cf9-aa4580244e37" containerName="oauth-openshift" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.530928 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.532893 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.533300 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.533340 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.533301 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.536471 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.536482 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.536498 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.537496 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.537696 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.537728 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.537866 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.538084 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.557093 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.560636 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.567572 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.601496 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-system-router-certs\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.601552 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.601661 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-user-template-error\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.601697 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-user-template-login\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.601728 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.601771 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-system-session\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.601793 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.601826 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/257ead4d-c144-4f4e-9926-0a7e22ea38e2-audit-dir\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.601851 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/257ead4d-c144-4f4e-9926-0a7e22ea38e2-audit-policies\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 
10:58:06.601879 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.601884 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.601930 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-system-service-ca\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.601985 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.602025 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sv5wx\" (UniqueName: \"kubernetes.io/projected/257ead4d-c144-4f4e-9926-0a7e22ea38e2-kube-api-access-sv5wx\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.602055 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.662733 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.703139 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/257ead4d-c144-4f4e-9926-0a7e22ea38e2-audit-dir\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.703218 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/257ead4d-c144-4f4e-9926-0a7e22ea38e2-audit-policies\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.703266 4838 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.703306 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/257ead4d-c144-4f4e-9926-0a7e22ea38e2-audit-dir\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.703325 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-system-service-ca\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.703453 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.703495 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sv5wx\" (UniqueName: \"kubernetes.io/projected/257ead4d-c144-4f4e-9926-0a7e22ea38e2-kube-api-access-sv5wx\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.703558 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.703586 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-system-router-certs\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.703657 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.703737 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-user-template-error\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.703761 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-user-template-login\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.703804 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.703845 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-system-session\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.703869 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.704892 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.705182 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.705710 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/257ead4d-c144-4f4e-9926-0a7e22ea38e2-audit-policies\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.706723 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" 
(UniqueName: \"kubernetes.io/configmap/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-system-service-ca\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.709433 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-user-template-error\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.709469 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-system-session\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.711973 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-user-template-login\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.712177 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.712209 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.712235 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.713072 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.728398 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/257ead4d-c144-4f4e-9926-0a7e22ea38e2-v4-0-config-system-router-certs\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.732565 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sv5wx\" (UniqueName: \"kubernetes.io/projected/257ead4d-c144-4f4e-9926-0a7e22ea38e2-kube-api-access-sv5wx\") pod \"oauth-openshift-67556b9b9b-bn4vr\" (UID: \"257ead4d-c144-4f4e-9926-0a7e22ea38e2\") " pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.771397 4838 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.774024 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.806099 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.852775 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.884238 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.893272 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.895521 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Feb 02 10:58:06 crc kubenswrapper[4838]: I0202 10:58:06.947363 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Feb 02 10:58:07 crc kubenswrapper[4838]: I0202 10:58:07.035356 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Feb 02 10:58:07 crc kubenswrapper[4838]: I0202 10:58:07.055037 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Feb 02 10:58:07 crc kubenswrapper[4838]: I0202 10:58:07.066648 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Feb 02 10:58:07 crc kubenswrapper[4838]: I0202 10:58:07.127633 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-67556b9b9b-bn4vr"] Feb 02 10:58:07 crc kubenswrapper[4838]: I0202 10:58:07.142260 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Feb 02 10:58:07 crc kubenswrapper[4838]: I0202 10:58:07.171198 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Feb 02 10:58:07 crc kubenswrapper[4838]: I0202 10:58:07.185991 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Feb 02 10:58:07 crc kubenswrapper[4838]: I0202 10:58:07.372088 4838 
Feb 02 10:58:07 crc kubenswrapper[4838]: I0202 10:58:07.372088 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Feb 02 10:58:07 crc kubenswrapper[4838]: I0202 10:58:07.395011 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt"
Feb 02 10:58:07 crc kubenswrapper[4838]: I0202 10:58:07.527804 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Feb 02 10:58:07 crc kubenswrapper[4838]: I0202 10:58:07.580418 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Feb 02 10:58:07 crc kubenswrapper[4838]: I0202 10:58:07.664532 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-67556b9b9b-bn4vr"]
Feb 02 10:58:07 crc kubenswrapper[4838]: I0202 10:58:07.716906 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Feb 02 10:58:07 crc kubenswrapper[4838]: I0202 10:58:07.746102 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Feb 02 10:58:07 crc kubenswrapper[4838]: I0202 10:58:07.747290 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Feb 02 10:58:07 crc kubenswrapper[4838]: I0202 10:58:07.750511 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" event={"ID":"257ead4d-c144-4f4e-9926-0a7e22ea38e2","Type":"ContainerStarted","Data":"aa45378863b9da6b9f8082100cd4f6b563c725d3c951e9f90e889c02da0af624"}
Feb 02 10:58:07 crc kubenswrapper[4838]: I0202 10:58:07.836552 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Feb 02 10:58:07 crc kubenswrapper[4838]: I0202 10:58:07.950932 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Feb 02 10:58:07 crc kubenswrapper[4838]: I0202 10:58:07.987846 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Feb 02 10:58:08 crc kubenswrapper[4838]: I0202 10:58:08.100592 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Feb 02 10:58:08 crc kubenswrapper[4838]: I0202 10:58:08.203417 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Feb 02 10:58:08 crc kubenswrapper[4838]: I0202 10:58:08.241589 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Feb 02 10:58:08 crc kubenswrapper[4838]: I0202 10:58:08.286082 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Feb 02 10:58:08 crc kubenswrapper[4838]: I0202 10:58:08.441586 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Feb 02 10:58:08 crc kubenswrapper[4838]: I0202 10:58:08.491183 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Feb 02 10:58:08 crc kubenswrapper[4838]: I0202 10:58:08.526854 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib"
Feb 02 10:58:08 crc kubenswrapper[4838]: I0202 10:58:08.581937 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Feb 02 10:58:08 crc kubenswrapper[4838]: I0202 10:58:08.630653 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Feb 02 10:58:08 crc kubenswrapper[4838]: I0202 10:58:08.715452 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Feb 02 10:58:08 crc kubenswrapper[4838]: I0202 10:58:08.758331 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" event={"ID":"257ead4d-c144-4f4e-9926-0a7e22ea38e2","Type":"ContainerStarted","Data":"65cea60e42c1ae56a0c4ce8e4b3e4db43af379619fdfbcdfa15bdc3d09bb12be"}
Feb 02 10:58:08 crc kubenswrapper[4838]: I0202 10:58:08.758668 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr"
Feb 02 10:58:08 crc kubenswrapper[4838]: I0202 10:58:08.764544 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr"
Feb 02 10:58:08 crc kubenswrapper[4838]: I0202 10:58:08.800657 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-67556b9b9b-bn4vr" podStartSLOduration=50.800630922 podStartE2EDuration="50.800630922s" podCreationTimestamp="2026-02-02 10:57:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:58:08.785723504 +0000 UTC m=+283.122824592" watchObservedRunningTime="2026-02-02 10:58:08.800630922 +0000 UTC m=+283.137731970"
Feb 02 10:58:08 crc kubenswrapper[4838]: I0202 10:58:08.880015 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Feb 02 10:58:08 crc kubenswrapper[4838]: I0202 10:58:08.882523 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Feb 02 10:58:08 crc kubenswrapper[4838]: I0202 10:58:08.987290 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Feb 02 10:58:09 crc kubenswrapper[4838]: I0202 10:58:09.023047 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Feb 02 10:58:09 crc kubenswrapper[4838]: I0202 10:58:09.033251 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Feb 02 10:58:09 crc kubenswrapper[4838]: I0202 10:58:09.058692 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p"
Feb 02 10:58:09 crc kubenswrapper[4838]: I0202 10:58:09.098806 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Feb 02 10:58:09 crc kubenswrapper[4838]: I0202 10:58:09.129610 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Feb 02 10:58:09 crc kubenswrapper[4838]: I0202 10:58:09.186722 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Feb 02 10:58:09 crc kubenswrapper[4838]: I0202 10:58:09.216608 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Feb 02 10:58:09 crc kubenswrapper[4838]: I0202 10:58:09.302350 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Feb 02 10:58:09 crc kubenswrapper[4838]: I0202 10:58:09.345827 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Feb 02 10:58:09 crc kubenswrapper[4838]: I0202 10:58:09.412751 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Feb 02 10:58:09 crc kubenswrapper[4838]: I0202 10:58:09.532753 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Feb 02 10:58:09 crc kubenswrapper[4838]: I0202 10:58:09.720325 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 02 10:58:09 crc kubenswrapper[4838]: I0202 10:58:09.731564 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Feb 02 10:58:09 crc kubenswrapper[4838]: I0202 10:58:09.746523 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Feb 02 10:58:09 crc kubenswrapper[4838]: I0202 10:58:09.793472 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Feb 02 10:58:09 crc kubenswrapper[4838]: I0202 10:58:09.857971 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Feb 02 10:58:09 crc kubenswrapper[4838]: I0202 10:58:09.879571 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Feb 02 10:58:09 crc kubenswrapper[4838]: I0202 10:58:09.943552 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Feb 02 10:58:10 crc kubenswrapper[4838]: I0202 10:58:10.144971 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Feb 02 10:58:10 crc kubenswrapper[4838]: I0202 10:58:10.296122 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 02 10:58:10 crc kubenswrapper[4838]: I0202 10:58:10.382067 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Feb 02 10:58:10 crc kubenswrapper[4838]: I0202 10:58:10.520506 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Feb 02 10:58:10 crc kubenswrapper[4838]: I0202 10:58:10.538248 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Feb 02 10:58:10 crc kubenswrapper[4838]: I0202 10:58:10.563312 4838 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Feb 02 10:58:10 crc kubenswrapper[4838]: I0202 10:58:10.581468 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Feb 02 10:58:10 crc kubenswrapper[4838]: I0202 10:58:10.582829 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 02 10:58:10 crc kubenswrapper[4838]: I0202 10:58:10.657793 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Feb 02 10:58:10 crc kubenswrapper[4838]: I0202 10:58:10.682600 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Feb 02 10:58:10 crc kubenswrapper[4838]: I0202 10:58:10.865856 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Feb 02 10:58:11 crc kubenswrapper[4838]: I0202 10:58:11.071506 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Feb 02 10:58:11 crc kubenswrapper[4838]: I0202 10:58:11.127558 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Feb 02 10:58:11 crc kubenswrapper[4838]: I0202 10:58:11.270895 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Feb 02 10:58:11 crc kubenswrapper[4838]: I0202 10:58:11.560962 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Feb 02 10:58:12 crc kubenswrapper[4838]: I0202 10:58:12.526510 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Feb 02 10:58:13 crc kubenswrapper[4838]: I0202 10:58:13.789830 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Feb 02 10:58:13 crc kubenswrapper[4838]: I0202 10:58:13.890829 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn"] Feb 02 10:58:13 crc kubenswrapper[4838]: I0202 10:58:13.891196 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn" podUID="2b9efaa6-38f5-4e59-99b5-c16e0d93755e" containerName="controller-manager" containerID="cri-o://a3dedabece85238e5b3773b44421da86c937bfd4bec3d7a726d17b23808a8db4" gracePeriod=30 Feb 02 10:58:13 crc kubenswrapper[4838]: I0202 10:58:13.989555 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg"] Feb 02 10:58:13 crc kubenswrapper[4838]: I0202 10:58:13.992938 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg" podUID="7cf2e0d7-4a0e-492d-aeb2-60855bdd4328" containerName="route-controller-manager" containerID="cri-o://2b0d8fedd7e494a993e6019444b9575cadbe949a261b4964fd625dd2522c4bf6" gracePeriod=30 Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.267244 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.396222 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-proxy-ca-bundles\") pod \"2b9efaa6-38f5-4e59-99b5-c16e0d93755e\" (UID: \"2b9efaa6-38f5-4e59-99b5-c16e0d93755e\") " Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.396323 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-config\") pod \"2b9efaa6-38f5-4e59-99b5-c16e0d93755e\" (UID: \"2b9efaa6-38f5-4e59-99b5-c16e0d93755e\") " Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.396351 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-serving-cert\") pod \"2b9efaa6-38f5-4e59-99b5-c16e0d93755e\" (UID: \"2b9efaa6-38f5-4e59-99b5-c16e0d93755e\") " Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.396370 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bhrsj\" (UniqueName: \"kubernetes.io/projected/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-kube-api-access-bhrsj\") pod \"2b9efaa6-38f5-4e59-99b5-c16e0d93755e\" (UID: \"2b9efaa6-38f5-4e59-99b5-c16e0d93755e\") " Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.396403 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-client-ca\") pod \"2b9efaa6-38f5-4e59-99b5-c16e0d93755e\" (UID: \"2b9efaa6-38f5-4e59-99b5-c16e0d93755e\") " Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.397116 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "2b9efaa6-38f5-4e59-99b5-c16e0d93755e" (UID: "2b9efaa6-38f5-4e59-99b5-c16e0d93755e"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.397139 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-client-ca" (OuterVolumeSpecName: "client-ca") pod "2b9efaa6-38f5-4e59-99b5-c16e0d93755e" (UID: "2b9efaa6-38f5-4e59-99b5-c16e0d93755e"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.397182 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-config" (OuterVolumeSpecName: "config") pod "2b9efaa6-38f5-4e59-99b5-c16e0d93755e" (UID: "2b9efaa6-38f5-4e59-99b5-c16e0d93755e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.402480 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "2b9efaa6-38f5-4e59-99b5-c16e0d93755e" (UID: "2b9efaa6-38f5-4e59-99b5-c16e0d93755e"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.402654 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-kube-api-access-bhrsj" (OuterVolumeSpecName: "kube-api-access-bhrsj") pod "2b9efaa6-38f5-4e59-99b5-c16e0d93755e" (UID: "2b9efaa6-38f5-4e59-99b5-c16e0d93755e"). InnerVolumeSpecName "kube-api-access-bhrsj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.498182 4838 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.498247 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.498258 4838 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.498267 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bhrsj\" (UniqueName: \"kubernetes.io/projected/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-kube-api-access-bhrsj\") on node \"crc\" DevicePath \"\"" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.498278 4838 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2b9efaa6-38f5-4e59-99b5-c16e0d93755e-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.669819 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.790797 4838 generic.go:334] "Generic (PLEG): container finished" podID="2b9efaa6-38f5-4e59-99b5-c16e0d93755e" containerID="a3dedabece85238e5b3773b44421da86c937bfd4bec3d7a726d17b23808a8db4" exitCode=0 Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.790844 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.790885 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn" event={"ID":"2b9efaa6-38f5-4e59-99b5-c16e0d93755e","Type":"ContainerDied","Data":"a3dedabece85238e5b3773b44421da86c937bfd4bec3d7a726d17b23808a8db4"} Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.790919 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn" event={"ID":"2b9efaa6-38f5-4e59-99b5-c16e0d93755e","Type":"ContainerDied","Data":"b0513cdb80f7cf5a9af336e896f27ca31f2f88ebc76667f662d15deb51e81538"} Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.790943 4838 scope.go:117] "RemoveContainer" containerID="a3dedabece85238e5b3773b44421da86c937bfd4bec3d7a726d17b23808a8db4" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.795436 4838 generic.go:334] "Generic (PLEG): container finished" podID="7cf2e0d7-4a0e-492d-aeb2-60855bdd4328" containerID="2b0d8fedd7e494a993e6019444b9575cadbe949a261b4964fd625dd2522c4bf6" exitCode=0 Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.795465 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg" event={"ID":"7cf2e0d7-4a0e-492d-aeb2-60855bdd4328","Type":"ContainerDied","Data":"2b0d8fedd7e494a993e6019444b9575cadbe949a261b4964fd625dd2522c4bf6"} Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.795503 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg" event={"ID":"7cf2e0d7-4a0e-492d-aeb2-60855bdd4328","Type":"ContainerDied","Data":"d20a23227de8cde10c11f3fe22747925820b13551149c5d93aa190bbfe666b1c"} Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.795520 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.801357 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rzp7\" (UniqueName: \"kubernetes.io/projected/7cf2e0d7-4a0e-492d-aeb2-60855bdd4328-kube-api-access-9rzp7\") pod \"7cf2e0d7-4a0e-492d-aeb2-60855bdd4328\" (UID: \"7cf2e0d7-4a0e-492d-aeb2-60855bdd4328\") " Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.801422 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7cf2e0d7-4a0e-492d-aeb2-60855bdd4328-serving-cert\") pod \"7cf2e0d7-4a0e-492d-aeb2-60855bdd4328\" (UID: \"7cf2e0d7-4a0e-492d-aeb2-60855bdd4328\") " Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.801489 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cf2e0d7-4a0e-492d-aeb2-60855bdd4328-config\") pod \"7cf2e0d7-4a0e-492d-aeb2-60855bdd4328\" (UID: \"7cf2e0d7-4a0e-492d-aeb2-60855bdd4328\") " Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.801552 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7cf2e0d7-4a0e-492d-aeb2-60855bdd4328-client-ca\") pod \"7cf2e0d7-4a0e-492d-aeb2-60855bdd4328\" (UID: \"7cf2e0d7-4a0e-492d-aeb2-60855bdd4328\") " Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.803113 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7cf2e0d7-4a0e-492d-aeb2-60855bdd4328-client-ca" (OuterVolumeSpecName: "client-ca") pod "7cf2e0d7-4a0e-492d-aeb2-60855bdd4328" (UID: "7cf2e0d7-4a0e-492d-aeb2-60855bdd4328"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.803140 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7cf2e0d7-4a0e-492d-aeb2-60855bdd4328-config" (OuterVolumeSpecName: "config") pod "7cf2e0d7-4a0e-492d-aeb2-60855bdd4328" (UID: "7cf2e0d7-4a0e-492d-aeb2-60855bdd4328"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.806910 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7cf2e0d7-4a0e-492d-aeb2-60855bdd4328-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7cf2e0d7-4a0e-492d-aeb2-60855bdd4328" (UID: "7cf2e0d7-4a0e-492d-aeb2-60855bdd4328"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.807596 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7cf2e0d7-4a0e-492d-aeb2-60855bdd4328-kube-api-access-9rzp7" (OuterVolumeSpecName: "kube-api-access-9rzp7") pod "7cf2e0d7-4a0e-492d-aeb2-60855bdd4328" (UID: "7cf2e0d7-4a0e-492d-aeb2-60855bdd4328"). InnerVolumeSpecName "kube-api-access-9rzp7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.809103 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn"] Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.811519 4838 scope.go:117] "RemoveContainer" containerID="a3dedabece85238e5b3773b44421da86c937bfd4bec3d7a726d17b23808a8db4" Feb 02 10:58:14 crc kubenswrapper[4838]: E0202 10:58:14.811919 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3dedabece85238e5b3773b44421da86c937bfd4bec3d7a726d17b23808a8db4\": container with ID starting with a3dedabece85238e5b3773b44421da86c937bfd4bec3d7a726d17b23808a8db4 not found: ID does not exist" containerID="a3dedabece85238e5b3773b44421da86c937bfd4bec3d7a726d17b23808a8db4" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.811957 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3dedabece85238e5b3773b44421da86c937bfd4bec3d7a726d17b23808a8db4"} err="failed to get container status \"a3dedabece85238e5b3773b44421da86c937bfd4bec3d7a726d17b23808a8db4\": rpc error: code = NotFound desc = could not find container \"a3dedabece85238e5b3773b44421da86c937bfd4bec3d7a726d17b23808a8db4\": container with ID starting with a3dedabece85238e5b3773b44421da86c937bfd4bec3d7a726d17b23808a8db4 not found: ID does not exist" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.811985 4838 scope.go:117] "RemoveContainer" containerID="2b0d8fedd7e494a993e6019444b9575cadbe949a261b4964fd625dd2522c4bf6" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.815366 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7f7d77ff9f-8p4qn"] Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.825673 4838 scope.go:117] "RemoveContainer" containerID="2b0d8fedd7e494a993e6019444b9575cadbe949a261b4964fd625dd2522c4bf6" Feb 02 10:58:14 crc kubenswrapper[4838]: E0202 10:58:14.826134 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b0d8fedd7e494a993e6019444b9575cadbe949a261b4964fd625dd2522c4bf6\": container with ID starting with 2b0d8fedd7e494a993e6019444b9575cadbe949a261b4964fd625dd2522c4bf6 not found: ID does not exist" containerID="2b0d8fedd7e494a993e6019444b9575cadbe949a261b4964fd625dd2522c4bf6" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.826219 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b0d8fedd7e494a993e6019444b9575cadbe949a261b4964fd625dd2522c4bf6"} err="failed to get container status \"2b0d8fedd7e494a993e6019444b9575cadbe949a261b4964fd625dd2522c4bf6\": rpc error: code = NotFound desc = could not find container \"2b0d8fedd7e494a993e6019444b9575cadbe949a261b4964fd625dd2522c4bf6\": container with ID starting with 2b0d8fedd7e494a993e6019444b9575cadbe949a261b4964fd625dd2522c4bf6 not found: ID does not exist" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.903158 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cf2e0d7-4a0e-492d-aeb2-60855bdd4328-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.903217 4838 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/7cf2e0d7-4a0e-492d-aeb2-60855bdd4328-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.903239 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rzp7\" (UniqueName: \"kubernetes.io/projected/7cf2e0d7-4a0e-492d-aeb2-60855bdd4328-kube-api-access-9rzp7\") on node \"crc\" DevicePath \"\"" Feb 02 10:58:14 crc kubenswrapper[4838]: I0202 10:58:14.903260 4838 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7cf2e0d7-4a0e-492d-aeb2-60855bdd4328-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.133944 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg"] Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.147728 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-766676dc96-d44sg"] Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.799125 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6"] Feb 02 10:58:15 crc kubenswrapper[4838]: E0202 10:58:15.799526 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cf2e0d7-4a0e-492d-aeb2-60855bdd4328" containerName="route-controller-manager" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.799554 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cf2e0d7-4a0e-492d-aeb2-60855bdd4328" containerName="route-controller-manager" Feb 02 10:58:15 crc kubenswrapper[4838]: E0202 10:58:15.799594 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b9efaa6-38f5-4e59-99b5-c16e0d93755e" containerName="controller-manager" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.799607 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b9efaa6-38f5-4e59-99b5-c16e0d93755e" containerName="controller-manager" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.799906 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="7cf2e0d7-4a0e-492d-aeb2-60855bdd4328" containerName="route-controller-manager" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.799936 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b9efaa6-38f5-4e59-99b5-c16e0d93755e" containerName="controller-manager" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.800818 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.803748 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.804667 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.804720 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.804820 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.807332 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.807525 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.811768 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-59bdffccc9-wb6pp"] Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.812456 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-59bdffccc9-wb6pp" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.816375 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.817323 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.819503 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.819543 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.819523 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.820401 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.821678 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6"] Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.829790 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-59bdffccc9-wb6pp"] Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.831541 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.917812 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4s9n\" (UniqueName: 
\"kubernetes.io/projected/27cbad85-0739-4361-963c-969ea080f3f5-kube-api-access-s4s9n\") pod \"controller-manager-59bdffccc9-wb6pp\" (UID: \"27cbad85-0739-4361-963c-969ea080f3f5\") " pod="openshift-controller-manager/controller-manager-59bdffccc9-wb6pp" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.917882 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/27cbad85-0739-4361-963c-969ea080f3f5-client-ca\") pod \"controller-manager-59bdffccc9-wb6pp\" (UID: \"27cbad85-0739-4361-963c-969ea080f3f5\") " pod="openshift-controller-manager/controller-manager-59bdffccc9-wb6pp" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.918225 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e9e9517-6d6f-4fdd-874c-4095c05415c9-serving-cert\") pod \"route-controller-manager-7b57bbc5d8-87tk6\" (UID: \"5e9e9517-6d6f-4fdd-874c-4095c05415c9\") " pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.918279 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27cbad85-0739-4361-963c-969ea080f3f5-config\") pod \"controller-manager-59bdffccc9-wb6pp\" (UID: \"27cbad85-0739-4361-963c-969ea080f3f5\") " pod="openshift-controller-manager/controller-manager-59bdffccc9-wb6pp" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.918329 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e9e9517-6d6f-4fdd-874c-4095c05415c9-config\") pod \"route-controller-manager-7b57bbc5d8-87tk6\" (UID: \"5e9e9517-6d6f-4fdd-874c-4095c05415c9\") " pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.918372 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fskl2\" (UniqueName: \"kubernetes.io/projected/5e9e9517-6d6f-4fdd-874c-4095c05415c9-kube-api-access-fskl2\") pod \"route-controller-manager-7b57bbc5d8-87tk6\" (UID: \"5e9e9517-6d6f-4fdd-874c-4095c05415c9\") " pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.918491 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27cbad85-0739-4361-963c-969ea080f3f5-serving-cert\") pod \"controller-manager-59bdffccc9-wb6pp\" (UID: \"27cbad85-0739-4361-963c-969ea080f3f5\") " pod="openshift-controller-manager/controller-manager-59bdffccc9-wb6pp" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.918540 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/27cbad85-0739-4361-963c-969ea080f3f5-proxy-ca-bundles\") pod \"controller-manager-59bdffccc9-wb6pp\" (UID: \"27cbad85-0739-4361-963c-969ea080f3f5\") " pod="openshift-controller-manager/controller-manager-59bdffccc9-wb6pp" Feb 02 10:58:15 crc kubenswrapper[4838]: I0202 10:58:15.918594 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/5e9e9517-6d6f-4fdd-874c-4095c05415c9-client-ca\") pod \"route-controller-manager-7b57bbc5d8-87tk6\" (UID: \"5e9e9517-6d6f-4fdd-874c-4095c05415c9\") " pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.019600 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5e9e9517-6d6f-4fdd-874c-4095c05415c9-client-ca\") pod \"route-controller-manager-7b57bbc5d8-87tk6\" (UID: \"5e9e9517-6d6f-4fdd-874c-4095c05415c9\") " pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.019949 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4s9n\" (UniqueName: \"kubernetes.io/projected/27cbad85-0739-4361-963c-969ea080f3f5-kube-api-access-s4s9n\") pod \"controller-manager-59bdffccc9-wb6pp\" (UID: \"27cbad85-0739-4361-963c-969ea080f3f5\") " pod="openshift-controller-manager/controller-manager-59bdffccc9-wb6pp" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.020111 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/27cbad85-0739-4361-963c-969ea080f3f5-client-ca\") pod \"controller-manager-59bdffccc9-wb6pp\" (UID: \"27cbad85-0739-4361-963c-969ea080f3f5\") " pod="openshift-controller-manager/controller-manager-59bdffccc9-wb6pp" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.020194 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e9e9517-6d6f-4fdd-874c-4095c05415c9-serving-cert\") pod \"route-controller-manager-7b57bbc5d8-87tk6\" (UID: \"5e9e9517-6d6f-4fdd-874c-4095c05415c9\") " pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.020270 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27cbad85-0739-4361-963c-969ea080f3f5-config\") pod \"controller-manager-59bdffccc9-wb6pp\" (UID: \"27cbad85-0739-4361-963c-969ea080f3f5\") " pod="openshift-controller-manager/controller-manager-59bdffccc9-wb6pp" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.020354 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e9e9517-6d6f-4fdd-874c-4095c05415c9-config\") pod \"route-controller-manager-7b57bbc5d8-87tk6\" (UID: \"5e9e9517-6d6f-4fdd-874c-4095c05415c9\") " pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.020424 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fskl2\" (UniqueName: \"kubernetes.io/projected/5e9e9517-6d6f-4fdd-874c-4095c05415c9-kube-api-access-fskl2\") pod \"route-controller-manager-7b57bbc5d8-87tk6\" (UID: \"5e9e9517-6d6f-4fdd-874c-4095c05415c9\") " pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.020533 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27cbad85-0739-4361-963c-969ea080f3f5-serving-cert\") pod \"controller-manager-59bdffccc9-wb6pp\" (UID: 
\"27cbad85-0739-4361-963c-969ea080f3f5\") " pod="openshift-controller-manager/controller-manager-59bdffccc9-wb6pp" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.020602 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/27cbad85-0739-4361-963c-969ea080f3f5-proxy-ca-bundles\") pod \"controller-manager-59bdffccc9-wb6pp\" (UID: \"27cbad85-0739-4361-963c-969ea080f3f5\") " pod="openshift-controller-manager/controller-manager-59bdffccc9-wb6pp" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.021482 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5e9e9517-6d6f-4fdd-874c-4095c05415c9-client-ca\") pod \"route-controller-manager-7b57bbc5d8-87tk6\" (UID: \"5e9e9517-6d6f-4fdd-874c-4095c05415c9\") " pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.022234 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e9e9517-6d6f-4fdd-874c-4095c05415c9-config\") pod \"route-controller-manager-7b57bbc5d8-87tk6\" (UID: \"5e9e9517-6d6f-4fdd-874c-4095c05415c9\") " pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.022348 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/27cbad85-0739-4361-963c-969ea080f3f5-client-ca\") pod \"controller-manager-59bdffccc9-wb6pp\" (UID: \"27cbad85-0739-4361-963c-969ea080f3f5\") " pod="openshift-controller-manager/controller-manager-59bdffccc9-wb6pp" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.022822 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/27cbad85-0739-4361-963c-969ea080f3f5-proxy-ca-bundles\") pod \"controller-manager-59bdffccc9-wb6pp\" (UID: \"27cbad85-0739-4361-963c-969ea080f3f5\") " pod="openshift-controller-manager/controller-manager-59bdffccc9-wb6pp" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.025266 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e9e9517-6d6f-4fdd-874c-4095c05415c9-serving-cert\") pod \"route-controller-manager-7b57bbc5d8-87tk6\" (UID: \"5e9e9517-6d6f-4fdd-874c-4095c05415c9\") " pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.027715 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27cbad85-0739-4361-963c-969ea080f3f5-serving-cert\") pod \"controller-manager-59bdffccc9-wb6pp\" (UID: \"27cbad85-0739-4361-963c-969ea080f3f5\") " pod="openshift-controller-manager/controller-manager-59bdffccc9-wb6pp" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.034205 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27cbad85-0739-4361-963c-969ea080f3f5-config\") pod \"controller-manager-59bdffccc9-wb6pp\" (UID: \"27cbad85-0739-4361-963c-969ea080f3f5\") " pod="openshift-controller-manager/controller-manager-59bdffccc9-wb6pp" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.040674 4838 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-fskl2\" (UniqueName: \"kubernetes.io/projected/5e9e9517-6d6f-4fdd-874c-4095c05415c9-kube-api-access-fskl2\") pod \"route-controller-manager-7b57bbc5d8-87tk6\" (UID: \"5e9e9517-6d6f-4fdd-874c-4095c05415c9\") " pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.051445 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4s9n\" (UniqueName: \"kubernetes.io/projected/27cbad85-0739-4361-963c-969ea080f3f5-kube-api-access-s4s9n\") pod \"controller-manager-59bdffccc9-wb6pp\" (UID: \"27cbad85-0739-4361-963c-969ea080f3f5\") " pod="openshift-controller-manager/controller-manager-59bdffccc9-wb6pp" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.138417 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.146971 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-59bdffccc9-wb6pp" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.419920 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6"] Feb 02 10:58:16 crc kubenswrapper[4838]: W0202 10:58:16.424302 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5e9e9517_6d6f_4fdd_874c_4095c05415c9.slice/crio-a32add00f1173991b95c25e20de0948f72f59f3d053cd9cb2c005af361573271 WatchSource:0}: Error finding container a32add00f1173991b95c25e20de0948f72f59f3d053cd9cb2c005af361573271: Status 404 returned error can't find the container with id a32add00f1173991b95c25e20de0948f72f59f3d053cd9cb2c005af361573271 Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.516032 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b9efaa6-38f5-4e59-99b5-c16e0d93755e" path="/var/lib/kubelet/pods/2b9efaa6-38f5-4e59-99b5-c16e0d93755e/volumes" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.517493 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7cf2e0d7-4a0e-492d-aeb2-60855bdd4328" path="/var/lib/kubelet/pods/7cf2e0d7-4a0e-492d-aeb2-60855bdd4328/volumes" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.569982 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-59bdffccc9-wb6pp"] Feb 02 10:58:16 crc kubenswrapper[4838]: W0202 10:58:16.576665 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod27cbad85_0739_4361_963c_969ea080f3f5.slice/crio-07957e5840e46ce339a0a05779be8ee700d7224761a37d60ad497157d5178ffb WatchSource:0}: Error finding container 07957e5840e46ce339a0a05779be8ee700d7224761a37d60ad497157d5178ffb: Status 404 returned error can't find the container with id 07957e5840e46ce339a0a05779be8ee700d7224761a37d60ad497157d5178ffb Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.828237 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6" event={"ID":"5e9e9517-6d6f-4fdd-874c-4095c05415c9","Type":"ContainerStarted","Data":"d1e467447a653b99b65a4655ed981f7f0b639253ad21c60e532c9abcbb3d29c3"} Feb 02 10:58:16 crc kubenswrapper[4838]: 
I0202 10:58:16.828313 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6" event={"ID":"5e9e9517-6d6f-4fdd-874c-4095c05415c9","Type":"ContainerStarted","Data":"a32add00f1173991b95c25e20de0948f72f59f3d053cd9cb2c005af361573271"} Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.828574 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.832322 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-59bdffccc9-wb6pp" event={"ID":"27cbad85-0739-4361-963c-969ea080f3f5","Type":"ContainerStarted","Data":"3c175f26644badbe059abd5dbe4359d1d2267f93600f14203920da612ccffd9e"} Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.832365 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-59bdffccc9-wb6pp" event={"ID":"27cbad85-0739-4361-963c-969ea080f3f5","Type":"ContainerStarted","Data":"07957e5840e46ce339a0a05779be8ee700d7224761a37d60ad497157d5178ffb"} Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.833000 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-59bdffccc9-wb6pp" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.838768 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-59bdffccc9-wb6pp" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.877009 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6" podStartSLOduration=3.8768984619999998 podStartE2EDuration="3.876898462s" podCreationTimestamp="2026-02-02 10:58:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:58:16.856116269 +0000 UTC m=+291.193217297" watchObservedRunningTime="2026-02-02 10:58:16.876898462 +0000 UTC m=+291.213999520" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.881525 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-59bdffccc9-wb6pp" podStartSLOduration=3.881505131 podStartE2EDuration="3.881505131s" podCreationTimestamp="2026-02-02 10:58:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:58:16.878297241 +0000 UTC m=+291.215398289" watchObservedRunningTime="2026-02-02 10:58:16.881505131 +0000 UTC m=+291.218606199" Feb 02 10:58:16 crc kubenswrapper[4838]: I0202 10:58:16.937382 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6" Feb 02 10:58:25 crc kubenswrapper[4838]: I0202 10:58:25.890041 4838 generic.go:334] "Generic (PLEG): container finished" podID="3bffe14a-0216-4854-b0fc-7c482a297b82" containerID="4bcd58e413a776a9641083c37d859395b5d0a24ade0d69ad3cc133dd07cd6896" exitCode=0 Feb 02 10:58:25 crc kubenswrapper[4838]: I0202 10:58:25.890155 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-s96vn" 
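For post-mortem work it is often handy to pull the structured PLEG events back out of a flat log like this one. A small Go sketch that extracts pod, UID, event type, and container ID with a regular expression; the pattern is illustrative and is matched here against a line taken from this log:

package main

import (
	"fmt"
	"regexp"
)

// Matches the kubelet's structured "SyncLoop (PLEG): event for pod" lines.
var plegRe = regexp.MustCompile(`"SyncLoop \(PLEG\): event for pod" pod="([^"]+)" event={"ID":"([^"]+)","Type":"([^"]+)","Data":"([^"]+)"}`)

func main() {
	line := `I0202 10:58:16.828237 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6" event={"ID":"5e9e9517-6d6f-4fdd-874c-4095c05415c9","Type":"ContainerStarted","Data":"d1e467447a653b99b65a4655ed981f7f0b639253ad21c60e532c9abcbb3d29c3"}`
	if m := plegRe.FindStringSubmatch(line); m != nil {
		fmt.Printf("pod=%s uid=%s type=%s id=%s\n", m[1], m[2], m[3], m[4])
	}
}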
event={"ID":"3bffe14a-0216-4854-b0fc-7c482a297b82","Type":"ContainerDied","Data":"4bcd58e413a776a9641083c37d859395b5d0a24ade0d69ad3cc133dd07cd6896"} Feb 02 10:58:25 crc kubenswrapper[4838]: I0202 10:58:25.891030 4838 scope.go:117] "RemoveContainer" containerID="4bcd58e413a776a9641083c37d859395b5d0a24ade0d69ad3cc133dd07cd6896" Feb 02 10:58:26 crc kubenswrapper[4838]: I0202 10:58:26.257436 4838 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Feb 02 10:58:26 crc kubenswrapper[4838]: I0202 10:58:26.900821 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-s96vn" event={"ID":"3bffe14a-0216-4854-b0fc-7c482a297b82","Type":"ContainerStarted","Data":"010ec0a466638dfb9946541c87b10692e080aa0356ad1355b5835782286cc670"} Feb 02 10:58:26 crc kubenswrapper[4838]: I0202 10:58:26.902763 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-s96vn" Feb 02 10:58:26 crc kubenswrapper[4838]: I0202 10:58:26.905356 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-s96vn" Feb 02 10:58:33 crc kubenswrapper[4838]: I0202 10:58:33.491081 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6"] Feb 02 10:58:33 crc kubenswrapper[4838]: I0202 10:58:33.491672 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6" podUID="5e9e9517-6d6f-4fdd-874c-4095c05415c9" containerName="route-controller-manager" containerID="cri-o://d1e467447a653b99b65a4655ed981f7f0b639253ad21c60e532c9abcbb3d29c3" gracePeriod=30 Feb 02 10:58:33 crc kubenswrapper[4838]: I0202 10:58:33.953185 4838 generic.go:334] "Generic (PLEG): container finished" podID="5e9e9517-6d6f-4fdd-874c-4095c05415c9" containerID="d1e467447a653b99b65a4655ed981f7f0b639253ad21c60e532c9abcbb3d29c3" exitCode=0 Feb 02 10:58:33 crc kubenswrapper[4838]: I0202 10:58:33.953254 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6" event={"ID":"5e9e9517-6d6f-4fdd-874c-4095c05415c9","Type":"ContainerDied","Data":"d1e467447a653b99b65a4655ed981f7f0b639253ad21c60e532c9abcbb3d29c3"} Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.832024 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6" Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.866235 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5e9e9517-6d6f-4fdd-874c-4095c05415c9-client-ca\") pod \"5e9e9517-6d6f-4fdd-874c-4095c05415c9\" (UID: \"5e9e9517-6d6f-4fdd-874c-4095c05415c9\") " Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.866296 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e9e9517-6d6f-4fdd-874c-4095c05415c9-config\") pod \"5e9e9517-6d6f-4fdd-874c-4095c05415c9\" (UID: \"5e9e9517-6d6f-4fdd-874c-4095c05415c9\") " Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.866342 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e9e9517-6d6f-4fdd-874c-4095c05415c9-serving-cert\") pod \"5e9e9517-6d6f-4fdd-874c-4095c05415c9\" (UID: \"5e9e9517-6d6f-4fdd-874c-4095c05415c9\") " Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.866376 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fskl2\" (UniqueName: \"kubernetes.io/projected/5e9e9517-6d6f-4fdd-874c-4095c05415c9-kube-api-access-fskl2\") pod \"5e9e9517-6d6f-4fdd-874c-4095c05415c9\" (UID: \"5e9e9517-6d6f-4fdd-874c-4095c05415c9\") " Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.869793 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz"] Feb 02 10:58:34 crc kubenswrapper[4838]: E0202 10:58:34.870138 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e9e9517-6d6f-4fdd-874c-4095c05415c9" containerName="route-controller-manager" Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.870187 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e9e9517-6d6f-4fdd-874c-4095c05415c9" containerName="route-controller-manager" Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.870357 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e9e9517-6d6f-4fdd-874c-4095c05415c9" containerName="route-controller-manager" Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.870929 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz" Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.873408 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e9e9517-6d6f-4fdd-874c-4095c05415c9-kube-api-access-fskl2" (OuterVolumeSpecName: "kube-api-access-fskl2") pod "5e9e9517-6d6f-4fdd-874c-4095c05415c9" (UID: "5e9e9517-6d6f-4fdd-874c-4095c05415c9"). InnerVolumeSpecName "kube-api-access-fskl2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.874690 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e9e9517-6d6f-4fdd-874c-4095c05415c9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5e9e9517-6d6f-4fdd-874c-4095c05415c9" (UID: "5e9e9517-6d6f-4fdd-874c-4095c05415c9"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.875661 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e9e9517-6d6f-4fdd-874c-4095c05415c9-config" (OuterVolumeSpecName: "config") pod "5e9e9517-6d6f-4fdd-874c-4095c05415c9" (UID: "5e9e9517-6d6f-4fdd-874c-4095c05415c9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.880343 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz"] Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.890068 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e9e9517-6d6f-4fdd-874c-4095c05415c9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5e9e9517-6d6f-4fdd-874c-4095c05415c9" (UID: "5e9e9517-6d6f-4fdd-874c-4095c05415c9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.960638 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6" event={"ID":"5e9e9517-6d6f-4fdd-874c-4095c05415c9","Type":"ContainerDied","Data":"a32add00f1173991b95c25e20de0948f72f59f3d053cd9cb2c005af361573271"} Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.960689 4838 scope.go:117] "RemoveContainer" containerID="d1e467447a653b99b65a4655ed981f7f0b639253ad21c60e532c9abcbb3d29c3" Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.960778 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6" Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.967214 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/64b902a9-d3f3-45ea-bb08-385972ffe5f9-client-ca\") pod \"route-controller-manager-64ccff8bcf-hbvzz\" (UID: \"64b902a9-d3f3-45ea-bb08-385972ffe5f9\") " pod="openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz" Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.967258 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64b902a9-d3f3-45ea-bb08-385972ffe5f9-serving-cert\") pod \"route-controller-manager-64ccff8bcf-hbvzz\" (UID: \"64b902a9-d3f3-45ea-bb08-385972ffe5f9\") " pod="openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz" Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.967327 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64b902a9-d3f3-45ea-bb08-385972ffe5f9-config\") pod \"route-controller-manager-64ccff8bcf-hbvzz\" (UID: \"64b902a9-d3f3-45ea-bb08-385972ffe5f9\") " pod="openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz" Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.967403 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqmkd\" (UniqueName: \"kubernetes.io/projected/64b902a9-d3f3-45ea-bb08-385972ffe5f9-kube-api-access-fqmkd\") pod \"route-controller-manager-64ccff8bcf-hbvzz\" (UID: 
\"64b902a9-d3f3-45ea-bb08-385972ffe5f9\") " pod="openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz" Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.967478 4838 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e9e9517-6d6f-4fdd-874c-4095c05415c9-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.967692 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fskl2\" (UniqueName: \"kubernetes.io/projected/5e9e9517-6d6f-4fdd-874c-4095c05415c9-kube-api-access-fskl2\") on node \"crc\" DevicePath \"\"" Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.967705 4838 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5e9e9517-6d6f-4fdd-874c-4095c05415c9-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.967723 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e9e9517-6d6f-4fdd-874c-4095c05415c9-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.989169 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6"] Feb 02 10:58:34 crc kubenswrapper[4838]: I0202 10:58:34.991814 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-87tk6"] Feb 02 10:58:35 crc kubenswrapper[4838]: I0202 10:58:35.068880 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqmkd\" (UniqueName: \"kubernetes.io/projected/64b902a9-d3f3-45ea-bb08-385972ffe5f9-kube-api-access-fqmkd\") pod \"route-controller-manager-64ccff8bcf-hbvzz\" (UID: \"64b902a9-d3f3-45ea-bb08-385972ffe5f9\") " pod="openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz" Feb 02 10:58:35 crc kubenswrapper[4838]: I0202 10:58:35.069012 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/64b902a9-d3f3-45ea-bb08-385972ffe5f9-client-ca\") pod \"route-controller-manager-64ccff8bcf-hbvzz\" (UID: \"64b902a9-d3f3-45ea-bb08-385972ffe5f9\") " pod="openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz" Feb 02 10:58:35 crc kubenswrapper[4838]: I0202 10:58:35.069073 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64b902a9-d3f3-45ea-bb08-385972ffe5f9-serving-cert\") pod \"route-controller-manager-64ccff8bcf-hbvzz\" (UID: \"64b902a9-d3f3-45ea-bb08-385972ffe5f9\") " pod="openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz" Feb 02 10:58:35 crc kubenswrapper[4838]: I0202 10:58:35.069122 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64b902a9-d3f3-45ea-bb08-385972ffe5f9-config\") pod \"route-controller-manager-64ccff8bcf-hbvzz\" (UID: \"64b902a9-d3f3-45ea-bb08-385972ffe5f9\") " pod="openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz" Feb 02 10:58:35 crc kubenswrapper[4838]: I0202 10:58:35.070754 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/64b902a9-d3f3-45ea-bb08-385972ffe5f9-client-ca\") pod \"route-controller-manager-64ccff8bcf-hbvzz\" (UID: \"64b902a9-d3f3-45ea-bb08-385972ffe5f9\") " pod="openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz" Feb 02 10:58:35 crc kubenswrapper[4838]: I0202 10:58:35.071231 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64b902a9-d3f3-45ea-bb08-385972ffe5f9-config\") pod \"route-controller-manager-64ccff8bcf-hbvzz\" (UID: \"64b902a9-d3f3-45ea-bb08-385972ffe5f9\") " pod="openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz" Feb 02 10:58:35 crc kubenswrapper[4838]: I0202 10:58:35.073109 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64b902a9-d3f3-45ea-bb08-385972ffe5f9-serving-cert\") pod \"route-controller-manager-64ccff8bcf-hbvzz\" (UID: \"64b902a9-d3f3-45ea-bb08-385972ffe5f9\") " pod="openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz" Feb 02 10:58:35 crc kubenswrapper[4838]: I0202 10:58:35.090833 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqmkd\" (UniqueName: \"kubernetes.io/projected/64b902a9-d3f3-45ea-bb08-385972ffe5f9-kube-api-access-fqmkd\") pod \"route-controller-manager-64ccff8bcf-hbvzz\" (UID: \"64b902a9-d3f3-45ea-bb08-385972ffe5f9\") " pod="openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz" Feb 02 10:58:35 crc kubenswrapper[4838]: I0202 10:58:35.218382 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz" Feb 02 10:58:35 crc kubenswrapper[4838]: I0202 10:58:35.748337 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz"] Feb 02 10:58:35 crc kubenswrapper[4838]: I0202 10:58:35.977891 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz" event={"ID":"64b902a9-d3f3-45ea-bb08-385972ffe5f9","Type":"ContainerStarted","Data":"461b2cffb8344f65b776d7631db223cbf69a1463f4274e93f3de5da89210c877"} Feb 02 10:58:36 crc kubenswrapper[4838]: I0202 10:58:36.514930 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e9e9517-6d6f-4fdd-874c-4095c05415c9" path="/var/lib/kubelet/pods/5e9e9517-6d6f-4fdd-874c-4095c05415c9/volumes" Feb 02 10:58:36 crc kubenswrapper[4838]: I0202 10:58:36.985641 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz" event={"ID":"64b902a9-d3f3-45ea-bb08-385972ffe5f9","Type":"ContainerStarted","Data":"07dcef70806994cd6318ee2625e788e7f5013e8271216b9aca2b48e6c3a4345a"} Feb 02 10:58:36 crc kubenswrapper[4838]: I0202 10:58:36.985926 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz" Feb 02 10:58:36 crc kubenswrapper[4838]: I0202 10:58:36.995833 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz" Feb 02 10:58:37 crc kubenswrapper[4838]: I0202 10:58:37.019419 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz" podStartSLOduration=4.019392271 podStartE2EDuration="4.019392271s" podCreationTimestamp="2026-02-02 10:58:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:58:37.007366503 +0000 UTC m=+311.344467531" watchObservedRunningTime="2026-02-02 10:58:37.019392271 +0000 UTC m=+311.356493309" Feb 02 10:59:13 crc kubenswrapper[4838]: I0202 10:59:13.519267 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz"] Feb 02 10:59:13 crc kubenswrapper[4838]: I0202 10:59:13.520140 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz" podUID="64b902a9-d3f3-45ea-bb08-385972ffe5f9" containerName="route-controller-manager" containerID="cri-o://07dcef70806994cd6318ee2625e788e7f5013e8271216b9aca2b48e6c3a4345a" gracePeriod=30 Feb 02 10:59:14 crc kubenswrapper[4838]: I0202 10:59:14.242304 4838 generic.go:334] "Generic (PLEG): container finished" podID="64b902a9-d3f3-45ea-bb08-385972ffe5f9" containerID="07dcef70806994cd6318ee2625e788e7f5013e8271216b9aca2b48e6c3a4345a" exitCode=0 Feb 02 10:59:14 crc kubenswrapper[4838]: I0202 10:59:14.242362 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz" event={"ID":"64b902a9-d3f3-45ea-bb08-385972ffe5f9","Type":"ContainerDied","Data":"07dcef70806994cd6318ee2625e788e7f5013e8271216b9aca2b48e6c3a4345a"} Feb 02 10:59:14 crc kubenswrapper[4838]: I0202 10:59:14.458716 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz" Feb 02 10:59:14 crc kubenswrapper[4838]: I0202 10:59:14.512682 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/64b902a9-d3f3-45ea-bb08-385972ffe5f9-client-ca\") pod \"64b902a9-d3f3-45ea-bb08-385972ffe5f9\" (UID: \"64b902a9-d3f3-45ea-bb08-385972ffe5f9\") " Feb 02 10:59:14 crc kubenswrapper[4838]: I0202 10:59:14.512978 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64b902a9-d3f3-45ea-bb08-385972ffe5f9-serving-cert\") pod \"64b902a9-d3f3-45ea-bb08-385972ffe5f9\" (UID: \"64b902a9-d3f3-45ea-bb08-385972ffe5f9\") " Feb 02 10:59:14 crc kubenswrapper[4838]: I0202 10:59:14.513056 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqmkd\" (UniqueName: \"kubernetes.io/projected/64b902a9-d3f3-45ea-bb08-385972ffe5f9-kube-api-access-fqmkd\") pod \"64b902a9-d3f3-45ea-bb08-385972ffe5f9\" (UID: \"64b902a9-d3f3-45ea-bb08-385972ffe5f9\") " Feb 02 10:59:14 crc kubenswrapper[4838]: I0202 10:59:14.513084 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64b902a9-d3f3-45ea-bb08-385972ffe5f9-config\") pod \"64b902a9-d3f3-45ea-bb08-385972ffe5f9\" (UID: \"64b902a9-d3f3-45ea-bb08-385972ffe5f9\") " Feb 02 10:59:14 crc kubenswrapper[4838]: I0202 10:59:14.513848 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64b902a9-d3f3-45ea-bb08-385972ffe5f9-config" (OuterVolumeSpecName: "config") pod "64b902a9-d3f3-45ea-bb08-385972ffe5f9" (UID: "64b902a9-d3f3-45ea-bb08-385972ffe5f9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:59:14 crc kubenswrapper[4838]: I0202 10:59:14.514242 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64b902a9-d3f3-45ea-bb08-385972ffe5f9-client-ca" (OuterVolumeSpecName: "client-ca") pod "64b902a9-d3f3-45ea-bb08-385972ffe5f9" (UID: "64b902a9-d3f3-45ea-bb08-385972ffe5f9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 10:59:14 crc kubenswrapper[4838]: I0202 10:59:14.520787 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64b902a9-d3f3-45ea-bb08-385972ffe5f9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "64b902a9-d3f3-45ea-bb08-385972ffe5f9" (UID: "64b902a9-d3f3-45ea-bb08-385972ffe5f9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:59:14 crc kubenswrapper[4838]: I0202 10:59:14.524933 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64b902a9-d3f3-45ea-bb08-385972ffe5f9-kube-api-access-fqmkd" (OuterVolumeSpecName: "kube-api-access-fqmkd") pod "64b902a9-d3f3-45ea-bb08-385972ffe5f9" (UID: "64b902a9-d3f3-45ea-bb08-385972ffe5f9"). InnerVolumeSpecName "kube-api-access-fqmkd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:59:14 crc kubenswrapper[4838]: I0202 10:59:14.614661 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqmkd\" (UniqueName: \"kubernetes.io/projected/64b902a9-d3f3-45ea-bb08-385972ffe5f9-kube-api-access-fqmkd\") on node \"crc\" DevicePath \"\"" Feb 02 10:59:14 crc kubenswrapper[4838]: I0202 10:59:14.615002 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64b902a9-d3f3-45ea-bb08-385972ffe5f9-config\") on node \"crc\" DevicePath \"\"" Feb 02 10:59:14 crc kubenswrapper[4838]: I0202 10:59:14.615138 4838 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/64b902a9-d3f3-45ea-bb08-385972ffe5f9-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 10:59:14 crc kubenswrapper[4838]: I0202 10:59:14.615271 4838 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64b902a9-d3f3-45ea-bb08-385972ffe5f9-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 10:59:14 crc kubenswrapper[4838]: I0202 10:59:14.843319 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-b2zmj"] Feb 02 10:59:14 crc kubenswrapper[4838]: E0202 10:59:14.844028 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64b902a9-d3f3-45ea-bb08-385972ffe5f9" containerName="route-controller-manager" Feb 02 10:59:14 crc kubenswrapper[4838]: I0202 10:59:14.844228 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="64b902a9-d3f3-45ea-bb08-385972ffe5f9" containerName="route-controller-manager" Feb 02 10:59:14 crc kubenswrapper[4838]: I0202 10:59:14.844605 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="64b902a9-d3f3-45ea-bb08-385972ffe5f9" containerName="route-controller-manager" Feb 02 10:59:14 crc kubenswrapper[4838]: I0202 10:59:14.845455 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-b2zmj" Feb 02 10:59:14 crc kubenswrapper[4838]: I0202 10:59:14.854897 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-b2zmj"] Feb 02 10:59:14 crc kubenswrapper[4838]: I0202 10:59:14.917847 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b5d9601-d274-4dcc-a7de-0c9bc2f8773a-config\") pod \"route-controller-manager-7b57bbc5d8-b2zmj\" (UID: \"9b5d9601-d274-4dcc-a7de-0c9bc2f8773a\") " pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-b2zmj" Feb 02 10:59:14 crc kubenswrapper[4838]: I0202 10:59:14.917891 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9b5d9601-d274-4dcc-a7de-0c9bc2f8773a-serving-cert\") pod \"route-controller-manager-7b57bbc5d8-b2zmj\" (UID: \"9b5d9601-d274-4dcc-a7de-0c9bc2f8773a\") " pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-b2zmj" Feb 02 10:59:14 crc kubenswrapper[4838]: I0202 10:59:14.918036 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9b5d9601-d274-4dcc-a7de-0c9bc2f8773a-client-ca\") pod \"route-controller-manager-7b57bbc5d8-b2zmj\" (UID: \"9b5d9601-d274-4dcc-a7de-0c9bc2f8773a\") " pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-b2zmj" Feb 02 10:59:14 crc kubenswrapper[4838]: I0202 10:59:14.918068 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hvp7\" (UniqueName: \"kubernetes.io/projected/9b5d9601-d274-4dcc-a7de-0c9bc2f8773a-kube-api-access-8hvp7\") pod \"route-controller-manager-7b57bbc5d8-b2zmj\" (UID: \"9b5d9601-d274-4dcc-a7de-0c9bc2f8773a\") " pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-b2zmj" Feb 02 10:59:15 crc kubenswrapper[4838]: I0202 10:59:15.020085 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9b5d9601-d274-4dcc-a7de-0c9bc2f8773a-client-ca\") pod \"route-controller-manager-7b57bbc5d8-b2zmj\" (UID: \"9b5d9601-d274-4dcc-a7de-0c9bc2f8773a\") " pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-b2zmj" Feb 02 10:59:15 crc kubenswrapper[4838]: I0202 10:59:15.020179 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hvp7\" (UniqueName: \"kubernetes.io/projected/9b5d9601-d274-4dcc-a7de-0c9bc2f8773a-kube-api-access-8hvp7\") pod \"route-controller-manager-7b57bbc5d8-b2zmj\" (UID: \"9b5d9601-d274-4dcc-a7de-0c9bc2f8773a\") " pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-b2zmj" Feb 02 10:59:15 crc kubenswrapper[4838]: I0202 10:59:15.020238 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b5d9601-d274-4dcc-a7de-0c9bc2f8773a-config\") pod \"route-controller-manager-7b57bbc5d8-b2zmj\" (UID: \"9b5d9601-d274-4dcc-a7de-0c9bc2f8773a\") " pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-b2zmj" Feb 02 10:59:15 crc kubenswrapper[4838]: I0202 10:59:15.020275 4838 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9b5d9601-d274-4dcc-a7de-0c9bc2f8773a-serving-cert\") pod \"route-controller-manager-7b57bbc5d8-b2zmj\" (UID: \"9b5d9601-d274-4dcc-a7de-0c9bc2f8773a\") " pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-b2zmj" Feb 02 10:59:15 crc kubenswrapper[4838]: I0202 10:59:15.021835 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9b5d9601-d274-4dcc-a7de-0c9bc2f8773a-client-ca\") pod \"route-controller-manager-7b57bbc5d8-b2zmj\" (UID: \"9b5d9601-d274-4dcc-a7de-0c9bc2f8773a\") " pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-b2zmj" Feb 02 10:59:15 crc kubenswrapper[4838]: I0202 10:59:15.022544 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b5d9601-d274-4dcc-a7de-0c9bc2f8773a-config\") pod \"route-controller-manager-7b57bbc5d8-b2zmj\" (UID: \"9b5d9601-d274-4dcc-a7de-0c9bc2f8773a\") " pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-b2zmj" Feb 02 10:59:15 crc kubenswrapper[4838]: I0202 10:59:15.025435 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9b5d9601-d274-4dcc-a7de-0c9bc2f8773a-serving-cert\") pod \"route-controller-manager-7b57bbc5d8-b2zmj\" (UID: \"9b5d9601-d274-4dcc-a7de-0c9bc2f8773a\") " pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-b2zmj" Feb 02 10:59:15 crc kubenswrapper[4838]: I0202 10:59:15.044116 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hvp7\" (UniqueName: \"kubernetes.io/projected/9b5d9601-d274-4dcc-a7de-0c9bc2f8773a-kube-api-access-8hvp7\") pod \"route-controller-manager-7b57bbc5d8-b2zmj\" (UID: \"9b5d9601-d274-4dcc-a7de-0c9bc2f8773a\") " pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-b2zmj" Feb 02 10:59:15 crc kubenswrapper[4838]: I0202 10:59:15.168192 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-b2zmj" Feb 02 10:59:15 crc kubenswrapper[4838]: I0202 10:59:15.249642 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz" event={"ID":"64b902a9-d3f3-45ea-bb08-385972ffe5f9","Type":"ContainerDied","Data":"461b2cffb8344f65b776d7631db223cbf69a1463f4274e93f3de5da89210c877"} Feb 02 10:59:15 crc kubenswrapper[4838]: I0202 10:59:15.249705 4838 scope.go:117] "RemoveContainer" containerID="07dcef70806994cd6318ee2625e788e7f5013e8271216b9aca2b48e6c3a4345a" Feb 02 10:59:15 crc kubenswrapper[4838]: I0202 10:59:15.249703 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz" Feb 02 10:59:15 crc kubenswrapper[4838]: I0202 10:59:15.282018 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz"] Feb 02 10:59:15 crc kubenswrapper[4838]: I0202 10:59:15.284792 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64ccff8bcf-hbvzz"] Feb 02 10:59:15 crc kubenswrapper[4838]: I0202 10:59:15.376132 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-b2zmj"] Feb 02 10:59:15 crc kubenswrapper[4838]: I0202 10:59:15.429863 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 10:59:15 crc kubenswrapper[4838]: I0202 10:59:15.429943 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 10:59:16 crc kubenswrapper[4838]: I0202 10:59:16.256782 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-b2zmj" event={"ID":"9b5d9601-d274-4dcc-a7de-0c9bc2f8773a","Type":"ContainerStarted","Data":"5674da2d8166186b82ed9bc52404d3d6b8306bbb096ce849fd98e59a3c805973"} Feb 02 10:59:16 crc kubenswrapper[4838]: I0202 10:59:16.256830 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-b2zmj" event={"ID":"9b5d9601-d274-4dcc-a7de-0c9bc2f8773a","Type":"ContainerStarted","Data":"f972dac164e0332464b8fae059452edb5b242db9e5c30986d1992484de726fe7"} Feb 02 10:59:16 crc kubenswrapper[4838]: I0202 10:59:16.258686 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-b2zmj" Feb 02 10:59:16 crc kubenswrapper[4838]: I0202 10:59:16.266495 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-b2zmj" Feb 02 10:59:16 crc kubenswrapper[4838]: I0202 10:59:16.287962 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7b57bbc5d8-b2zmj" podStartSLOduration=3.287938445 podStartE2EDuration="3.287938445s" podCreationTimestamp="2026-02-02 10:59:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:59:16.284662456 +0000 UTC m=+350.621763504" watchObservedRunningTime="2026-02-02 10:59:16.287938445 +0000 UTC m=+350.625039493" Feb 02 10:59:16 crc kubenswrapper[4838]: I0202 10:59:16.513935 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64b902a9-d3f3-45ea-bb08-385972ffe5f9" path="/var/lib/kubelet/pods/64b902a9-d3f3-45ea-bb08-385972ffe5f9/volumes" Feb 02 10:59:25 crc kubenswrapper[4838]: I0202 10:59:25.955960 4838 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-6szx6"] Feb 02 10:59:25 crc kubenswrapper[4838]: I0202 10:59:25.957311 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 10:59:25 crc kubenswrapper[4838]: I0202 10:59:25.974698 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-6szx6"] Feb 02 10:59:25 crc kubenswrapper[4838]: I0202 10:59:25.993977 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/42420eee-ce09-4ff8-8617-a13a43a555ff-registry-certificates\") pod \"image-registry-66df7c8f76-6szx6\" (UID: \"42420eee-ce09-4ff8-8617-a13a43a555ff\") " pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 10:59:25 crc kubenswrapper[4838]: I0202 10:59:25.994031 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24wd6\" (UniqueName: \"kubernetes.io/projected/42420eee-ce09-4ff8-8617-a13a43a555ff-kube-api-access-24wd6\") pod \"image-registry-66df7c8f76-6szx6\" (UID: \"42420eee-ce09-4ff8-8617-a13a43a555ff\") " pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 10:59:25 crc kubenswrapper[4838]: I0202 10:59:25.994127 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/42420eee-ce09-4ff8-8617-a13a43a555ff-ca-trust-extracted\") pod \"image-registry-66df7c8f76-6szx6\" (UID: \"42420eee-ce09-4ff8-8617-a13a43a555ff\") " pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 10:59:25 crc kubenswrapper[4838]: I0202 10:59:25.994167 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/42420eee-ce09-4ff8-8617-a13a43a555ff-installation-pull-secrets\") pod \"image-registry-66df7c8f76-6szx6\" (UID: \"42420eee-ce09-4ff8-8617-a13a43a555ff\") " pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 10:59:25 crc kubenswrapper[4838]: I0202 10:59:25.994209 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-6szx6\" (UID: \"42420eee-ce09-4ff8-8617-a13a43a555ff\") " pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 10:59:25 crc kubenswrapper[4838]: I0202 10:59:25.994276 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/42420eee-ce09-4ff8-8617-a13a43a555ff-trusted-ca\") pod \"image-registry-66df7c8f76-6szx6\" (UID: \"42420eee-ce09-4ff8-8617-a13a43a555ff\") " pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 10:59:25 crc kubenswrapper[4838]: I0202 10:59:25.994326 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/42420eee-ce09-4ff8-8617-a13a43a555ff-bound-sa-token\") pod \"image-registry-66df7c8f76-6szx6\" (UID: \"42420eee-ce09-4ff8-8617-a13a43a555ff\") " pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 
10:59:25 crc kubenswrapper[4838]: I0202 10:59:25.994355 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/42420eee-ce09-4ff8-8617-a13a43a555ff-registry-tls\") pod \"image-registry-66df7c8f76-6szx6\" (UID: \"42420eee-ce09-4ff8-8617-a13a43a555ff\") " pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 10:59:26 crc kubenswrapper[4838]: I0202 10:59:26.027104 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-6szx6\" (UID: \"42420eee-ce09-4ff8-8617-a13a43a555ff\") " pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 10:59:26 crc kubenswrapper[4838]: I0202 10:59:26.095239 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/42420eee-ce09-4ff8-8617-a13a43a555ff-trusted-ca\") pod \"image-registry-66df7c8f76-6szx6\" (UID: \"42420eee-ce09-4ff8-8617-a13a43a555ff\") " pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 10:59:26 crc kubenswrapper[4838]: I0202 10:59:26.095320 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/42420eee-ce09-4ff8-8617-a13a43a555ff-bound-sa-token\") pod \"image-registry-66df7c8f76-6szx6\" (UID: \"42420eee-ce09-4ff8-8617-a13a43a555ff\") " pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 10:59:26 crc kubenswrapper[4838]: I0202 10:59:26.095349 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/42420eee-ce09-4ff8-8617-a13a43a555ff-registry-tls\") pod \"image-registry-66df7c8f76-6szx6\" (UID: \"42420eee-ce09-4ff8-8617-a13a43a555ff\") " pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 10:59:26 crc kubenswrapper[4838]: I0202 10:59:26.095398 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/42420eee-ce09-4ff8-8617-a13a43a555ff-registry-certificates\") pod \"image-registry-66df7c8f76-6szx6\" (UID: \"42420eee-ce09-4ff8-8617-a13a43a555ff\") " pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 10:59:26 crc kubenswrapper[4838]: I0202 10:59:26.095422 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24wd6\" (UniqueName: \"kubernetes.io/projected/42420eee-ce09-4ff8-8617-a13a43a555ff-kube-api-access-24wd6\") pod \"image-registry-66df7c8f76-6szx6\" (UID: \"42420eee-ce09-4ff8-8617-a13a43a555ff\") " pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 10:59:26 crc kubenswrapper[4838]: I0202 10:59:26.095442 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/42420eee-ce09-4ff8-8617-a13a43a555ff-ca-trust-extracted\") pod \"image-registry-66df7c8f76-6szx6\" (UID: \"42420eee-ce09-4ff8-8617-a13a43a555ff\") " pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 10:59:26 crc kubenswrapper[4838]: I0202 10:59:26.095473 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: 
\"kubernetes.io/secret/42420eee-ce09-4ff8-8617-a13a43a555ff-installation-pull-secrets\") pod \"image-registry-66df7c8f76-6szx6\" (UID: \"42420eee-ce09-4ff8-8617-a13a43a555ff\") " pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 10:59:26 crc kubenswrapper[4838]: I0202 10:59:26.096886 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/42420eee-ce09-4ff8-8617-a13a43a555ff-ca-trust-extracted\") pod \"image-registry-66df7c8f76-6szx6\" (UID: \"42420eee-ce09-4ff8-8617-a13a43a555ff\") " pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 10:59:26 crc kubenswrapper[4838]: I0202 10:59:26.097256 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/42420eee-ce09-4ff8-8617-a13a43a555ff-trusted-ca\") pod \"image-registry-66df7c8f76-6szx6\" (UID: \"42420eee-ce09-4ff8-8617-a13a43a555ff\") " pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 10:59:26 crc kubenswrapper[4838]: I0202 10:59:26.097704 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/42420eee-ce09-4ff8-8617-a13a43a555ff-registry-certificates\") pod \"image-registry-66df7c8f76-6szx6\" (UID: \"42420eee-ce09-4ff8-8617-a13a43a555ff\") " pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 10:59:26 crc kubenswrapper[4838]: I0202 10:59:26.102370 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/42420eee-ce09-4ff8-8617-a13a43a555ff-installation-pull-secrets\") pod \"image-registry-66df7c8f76-6szx6\" (UID: \"42420eee-ce09-4ff8-8617-a13a43a555ff\") " pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 10:59:26 crc kubenswrapper[4838]: I0202 10:59:26.104483 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/42420eee-ce09-4ff8-8617-a13a43a555ff-registry-tls\") pod \"image-registry-66df7c8f76-6szx6\" (UID: \"42420eee-ce09-4ff8-8617-a13a43a555ff\") " pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 10:59:26 crc kubenswrapper[4838]: I0202 10:59:26.112288 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/42420eee-ce09-4ff8-8617-a13a43a555ff-bound-sa-token\") pod \"image-registry-66df7c8f76-6szx6\" (UID: \"42420eee-ce09-4ff8-8617-a13a43a555ff\") " pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 10:59:26 crc kubenswrapper[4838]: I0202 10:59:26.118847 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24wd6\" (UniqueName: \"kubernetes.io/projected/42420eee-ce09-4ff8-8617-a13a43a555ff-kube-api-access-24wd6\") pod \"image-registry-66df7c8f76-6szx6\" (UID: \"42420eee-ce09-4ff8-8617-a13a43a555ff\") " pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 10:59:26 crc kubenswrapper[4838]: I0202 10:59:26.285845 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 10:59:26 crc kubenswrapper[4838]: I0202 10:59:26.735929 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-6szx6"] Feb 02 10:59:27 crc kubenswrapper[4838]: I0202 10:59:27.330415 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" event={"ID":"42420eee-ce09-4ff8-8617-a13a43a555ff","Type":"ContainerStarted","Data":"fe9a5c1291cffd38937a9d3bbe785f85f797ea51969a3c9064b03cefe84bc8ba"} Feb 02 10:59:27 crc kubenswrapper[4838]: I0202 10:59:27.330790 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" event={"ID":"42420eee-ce09-4ff8-8617-a13a43a555ff","Type":"ContainerStarted","Data":"547c197222ef89fff7b6cdf58ac198122c6d3a8481c0f233a6dd130ccdbf8b4b"} Feb 02 10:59:27 crc kubenswrapper[4838]: I0202 10:59:27.332117 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 10:59:39 crc kubenswrapper[4838]: I0202 10:59:39.793982 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" podStartSLOduration=14.793963845 podStartE2EDuration="14.793963845s" podCreationTimestamp="2026-02-02 10:59:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:59:27.360828269 +0000 UTC m=+361.697929357" watchObservedRunningTime="2026-02-02 10:59:39.793963845 +0000 UTC m=+374.131064873" Feb 02 10:59:39 crc kubenswrapper[4838]: I0202 10:59:39.798923 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rbh6x"] Feb 02 10:59:39 crc kubenswrapper[4838]: I0202 10:59:39.799182 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rbh6x" podUID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" containerName="registry-server" containerID="cri-o://90cc1e5c3c847d7308b5f92754f2bcd257d5aa847e6bc2f7927ed934d636743c" gracePeriod=2 Feb 02 10:59:40 crc kubenswrapper[4838]: I0202 10:59:40.423147 4838 generic.go:334] "Generic (PLEG): container finished" podID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" containerID="90cc1e5c3c847d7308b5f92754f2bcd257d5aa847e6bc2f7927ed934d636743c" exitCode=0 Feb 02 10:59:40 crc kubenswrapper[4838]: I0202 10:59:40.423198 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rbh6x" event={"ID":"21ad3b27-d69d-4db6-a1c6-fac312ad582d","Type":"ContainerDied","Data":"90cc1e5c3c847d7308b5f92754f2bcd257d5aa847e6bc2f7927ed934d636743c"} Feb 02 10:59:40 crc kubenswrapper[4838]: I0202 10:59:40.718241 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rbh6x" Feb 02 10:59:40 crc kubenswrapper[4838]: I0202 10:59:40.755838 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l8wwd\" (UniqueName: \"kubernetes.io/projected/21ad3b27-d69d-4db6-a1c6-fac312ad582d-kube-api-access-l8wwd\") pod \"21ad3b27-d69d-4db6-a1c6-fac312ad582d\" (UID: \"21ad3b27-d69d-4db6-a1c6-fac312ad582d\") " Feb 02 10:59:40 crc kubenswrapper[4838]: I0202 10:59:40.755943 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21ad3b27-d69d-4db6-a1c6-fac312ad582d-utilities\") pod \"21ad3b27-d69d-4db6-a1c6-fac312ad582d\" (UID: \"21ad3b27-d69d-4db6-a1c6-fac312ad582d\") " Feb 02 10:59:40 crc kubenswrapper[4838]: I0202 10:59:40.755979 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21ad3b27-d69d-4db6-a1c6-fac312ad582d-catalog-content\") pod \"21ad3b27-d69d-4db6-a1c6-fac312ad582d\" (UID: \"21ad3b27-d69d-4db6-a1c6-fac312ad582d\") " Feb 02 10:59:40 crc kubenswrapper[4838]: I0202 10:59:40.759589 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21ad3b27-d69d-4db6-a1c6-fac312ad582d-utilities" (OuterVolumeSpecName: "utilities") pod "21ad3b27-d69d-4db6-a1c6-fac312ad582d" (UID: "21ad3b27-d69d-4db6-a1c6-fac312ad582d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 10:59:40 crc kubenswrapper[4838]: I0202 10:59:40.762427 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21ad3b27-d69d-4db6-a1c6-fac312ad582d-kube-api-access-l8wwd" (OuterVolumeSpecName: "kube-api-access-l8wwd") pod "21ad3b27-d69d-4db6-a1c6-fac312ad582d" (UID: "21ad3b27-d69d-4db6-a1c6-fac312ad582d"). InnerVolumeSpecName "kube-api-access-l8wwd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:59:40 crc kubenswrapper[4838]: I0202 10:59:40.803939 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21ad3b27-d69d-4db6-a1c6-fac312ad582d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "21ad3b27-d69d-4db6-a1c6-fac312ad582d" (UID: "21ad3b27-d69d-4db6-a1c6-fac312ad582d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 10:59:40 crc kubenswrapper[4838]: I0202 10:59:40.856709 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21ad3b27-d69d-4db6-a1c6-fac312ad582d-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 10:59:40 crc kubenswrapper[4838]: I0202 10:59:40.856739 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21ad3b27-d69d-4db6-a1c6-fac312ad582d-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 10:59:40 crc kubenswrapper[4838]: I0202 10:59:40.856751 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l8wwd\" (UniqueName: \"kubernetes.io/projected/21ad3b27-d69d-4db6-a1c6-fac312ad582d-kube-api-access-l8wwd\") on node \"crc\" DevicePath \"\"" Feb 02 10:59:41 crc kubenswrapper[4838]: I0202 10:59:41.431574 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rbh6x" event={"ID":"21ad3b27-d69d-4db6-a1c6-fac312ad582d","Type":"ContainerDied","Data":"119aa7b425e1a760f6b86ab52cb7c3033d1b4c6642e7373cf0a0243a4b530651"} Feb 02 10:59:41 crc kubenswrapper[4838]: I0202 10:59:41.431974 4838 scope.go:117] "RemoveContainer" containerID="90cc1e5c3c847d7308b5f92754f2bcd257d5aa847e6bc2f7927ed934d636743c" Feb 02 10:59:41 crc kubenswrapper[4838]: I0202 10:59:41.431662 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rbh6x" Feb 02 10:59:41 crc kubenswrapper[4838]: I0202 10:59:41.450722 4838 scope.go:117] "RemoveContainer" containerID="5405f4d10679447c248fc2347f0304c5a77c4451a5ba5b9c0a55f719a3358eda" Feb 02 10:59:41 crc kubenswrapper[4838]: I0202 10:59:41.467435 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rbh6x"] Feb 02 10:59:41 crc kubenswrapper[4838]: I0202 10:59:41.471371 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rbh6x"] Feb 02 10:59:41 crc kubenswrapper[4838]: I0202 10:59:41.494895 4838 scope.go:117] "RemoveContainer" containerID="a6729b9ce4581175317d8877173a9d3924e8676fec1c27767d4bb918351af12c" Feb 02 10:59:42 crc kubenswrapper[4838]: I0202 10:59:42.199549 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qwkhj"] Feb 02 10:59:42 crc kubenswrapper[4838]: I0202 10:59:42.200060 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qwkhj" podUID="f98a7f3b-5730-4469-aef3-188a9755f566" containerName="registry-server" containerID="cri-o://202b4aef59408ecb2a0f098f89c87e44fdf75bd3760f3ef1ae81e786ed0da8dc" gracePeriod=2 Feb 02 10:59:42 crc kubenswrapper[4838]: I0202 10:59:42.445453 4838 generic.go:334] "Generic (PLEG): container finished" podID="f98a7f3b-5730-4469-aef3-188a9755f566" containerID="202b4aef59408ecb2a0f098f89c87e44fdf75bd3760f3ef1ae81e786ed0da8dc" exitCode=0 Feb 02 10:59:42 crc kubenswrapper[4838]: I0202 10:59:42.445563 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qwkhj" event={"ID":"f98a7f3b-5730-4469-aef3-188a9755f566","Type":"ContainerDied","Data":"202b4aef59408ecb2a0f098f89c87e44fdf75bd3760f3ef1ae81e786ed0da8dc"} Feb 02 10:59:42 crc kubenswrapper[4838]: I0202 10:59:42.517930 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" path="/var/lib/kubelet/pods/21ad3b27-d69d-4db6-a1c6-fac312ad582d/volumes" Feb 02 10:59:43 crc kubenswrapper[4838]: I0202 10:59:43.146983 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qwkhj" Feb 02 10:59:43 crc kubenswrapper[4838]: I0202 10:59:43.290549 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f98a7f3b-5730-4469-aef3-188a9755f566-catalog-content\") pod \"f98a7f3b-5730-4469-aef3-188a9755f566\" (UID: \"f98a7f3b-5730-4469-aef3-188a9755f566\") " Feb 02 10:59:43 crc kubenswrapper[4838]: I0202 10:59:43.290759 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwqfm\" (UniqueName: \"kubernetes.io/projected/f98a7f3b-5730-4469-aef3-188a9755f566-kube-api-access-wwqfm\") pod \"f98a7f3b-5730-4469-aef3-188a9755f566\" (UID: \"f98a7f3b-5730-4469-aef3-188a9755f566\") " Feb 02 10:59:43 crc kubenswrapper[4838]: I0202 10:59:43.290791 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f98a7f3b-5730-4469-aef3-188a9755f566-utilities\") pod \"f98a7f3b-5730-4469-aef3-188a9755f566\" (UID: \"f98a7f3b-5730-4469-aef3-188a9755f566\") " Feb 02 10:59:43 crc kubenswrapper[4838]: I0202 10:59:43.294064 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f98a7f3b-5730-4469-aef3-188a9755f566-utilities" (OuterVolumeSpecName: "utilities") pod "f98a7f3b-5730-4469-aef3-188a9755f566" (UID: "f98a7f3b-5730-4469-aef3-188a9755f566"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 10:59:43 crc kubenswrapper[4838]: I0202 10:59:43.301514 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f98a7f3b-5730-4469-aef3-188a9755f566-kube-api-access-wwqfm" (OuterVolumeSpecName: "kube-api-access-wwqfm") pod "f98a7f3b-5730-4469-aef3-188a9755f566" (UID: "f98a7f3b-5730-4469-aef3-188a9755f566"). InnerVolumeSpecName "kube-api-access-wwqfm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:59:43 crc kubenswrapper[4838]: I0202 10:59:43.393034 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwqfm\" (UniqueName: \"kubernetes.io/projected/f98a7f3b-5730-4469-aef3-188a9755f566-kube-api-access-wwqfm\") on node \"crc\" DevicePath \"\"" Feb 02 10:59:43 crc kubenswrapper[4838]: I0202 10:59:43.393075 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f98a7f3b-5730-4469-aef3-188a9755f566-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 10:59:43 crc kubenswrapper[4838]: I0202 10:59:43.410542 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f98a7f3b-5730-4469-aef3-188a9755f566-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f98a7f3b-5730-4469-aef3-188a9755f566" (UID: "f98a7f3b-5730-4469-aef3-188a9755f566"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 10:59:43 crc kubenswrapper[4838]: I0202 10:59:43.462143 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qwkhj" event={"ID":"f98a7f3b-5730-4469-aef3-188a9755f566","Type":"ContainerDied","Data":"f75e5c9f8c68500780b40c97a17cd8e820d1a5817e9523454a5b83ea87c937d6"} Feb 02 10:59:43 crc kubenswrapper[4838]: I0202 10:59:43.462207 4838 scope.go:117] "RemoveContainer" containerID="202b4aef59408ecb2a0f098f89c87e44fdf75bd3760f3ef1ae81e786ed0da8dc" Feb 02 10:59:43 crc kubenswrapper[4838]: I0202 10:59:43.462348 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qwkhj" Feb 02 10:59:43 crc kubenswrapper[4838]: I0202 10:59:43.494471 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f98a7f3b-5730-4469-aef3-188a9755f566-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 10:59:43 crc kubenswrapper[4838]: I0202 10:59:43.494633 4838 scope.go:117] "RemoveContainer" containerID="54672b04a5c34d13586d6761222042345c79e24d51cc22c071f8c47ef4dbe060" Feb 02 10:59:43 crc kubenswrapper[4838]: I0202 10:59:43.526265 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qwkhj"] Feb 02 10:59:43 crc kubenswrapper[4838]: I0202 10:59:43.526344 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-qwkhj"] Feb 02 10:59:43 crc kubenswrapper[4838]: I0202 10:59:43.540521 4838 scope.go:117] "RemoveContainer" containerID="457e340060e298a95c675e0ea2a8427004bc52f326921279a81f22731d8ea978" Feb 02 10:59:44 crc kubenswrapper[4838]: I0202 10:59:44.514014 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f98a7f3b-5730-4469-aef3-188a9755f566" path="/var/lib/kubelet/pods/f98a7f3b-5730-4469-aef3-188a9755f566/volumes" Feb 02 10:59:45 crc kubenswrapper[4838]: I0202 10:59:45.429709 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 10:59:45 crc kubenswrapper[4838]: I0202 10:59:45.429789 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 10:59:46 crc kubenswrapper[4838]: I0202 10:59:46.291822 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-6szx6" Feb 02 10:59:46 crc kubenswrapper[4838]: I0202 10:59:46.355279 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8b9cc"] Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.376490 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m4f9j"] Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.377120 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-m4f9j" podUID="cdb7ade3-e1b6-436b-a5df-3abb972b72fa" containerName="registry-server" 
containerID="cri-o://087fedd9713829e266f735e578f7a5c5329112bf55b6288ef5410d669a789c2c" gracePeriod=30 Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.383814 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ntc2n"] Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.384069 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ntc2n" podUID="d7b6d22c-5441-4f5c-830b-17d67446352d" containerName="registry-server" containerID="cri-o://eab2da227dc61318787dee5786dede55b7ba45d35e1fa080d6ebb36b8f39b2f3" gracePeriod=30 Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.392546 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-s96vn"] Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.392761 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-s96vn" podUID="3bffe14a-0216-4854-b0fc-7c482a297b82" containerName="marketplace-operator" containerID="cri-o://010ec0a466638dfb9946541c87b10692e080aa0356ad1355b5835782286cc670" gracePeriod=30 Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.403954 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tnscg"] Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.404344 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tnscg" podUID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" containerName="registry-server" containerID="cri-o://e3c356ae730bee9d068384a6a605b708810afff935b31f6cfb83a350d9effaa5" gracePeriod=30 Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.420362 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pl5pl"] Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.420652 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pl5pl" podUID="193925e0-1419-444b-9e75-ed7371081181" containerName="registry-server" containerID="cri-o://1ccf99eff81a9257eb4c7e3fe871e1b87b43262f9d7034992255a0f579ebe3cf" gracePeriod=30 Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.431983 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2kxfd"] Feb 02 10:59:47 crc kubenswrapper[4838]: E0202 10:59:47.432231 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" containerName="extract-utilities" Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.432244 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" containerName="extract-utilities" Feb 02 10:59:47 crc kubenswrapper[4838]: E0202 10:59:47.432258 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" containerName="extract-content" Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.432265 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" containerName="extract-content" Feb 02 10:59:47 crc kubenswrapper[4838]: E0202 10:59:47.432275 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" containerName="registry-server" Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.432282 4838 
Feb 02 10:59:47 crc kubenswrapper[4838]: E0202 10:59:47.432292 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f98a7f3b-5730-4469-aef3-188a9755f566" containerName="extract-utilities"
Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.432298 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="f98a7f3b-5730-4469-aef3-188a9755f566" containerName="extract-utilities"
Feb 02 10:59:47 crc kubenswrapper[4838]: E0202 10:59:47.432305 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f98a7f3b-5730-4469-aef3-188a9755f566" containerName="extract-content"
Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.432311 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="f98a7f3b-5730-4469-aef3-188a9755f566" containerName="extract-content"
Feb 02 10:59:47 crc kubenswrapper[4838]: E0202 10:59:47.432320 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f98a7f3b-5730-4469-aef3-188a9755f566" containerName="registry-server"
Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.432325 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="f98a7f3b-5730-4469-aef3-188a9755f566" containerName="registry-server"
Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.432436 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="21ad3b27-d69d-4db6-a1c6-fac312ad582d" containerName="registry-server"
Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.432447 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="f98a7f3b-5730-4469-aef3-188a9755f566" containerName="registry-server"
Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.432841 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-2kxfd"
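[Annotation] The interleaved cpu_manager / state_mem / memory_manager entries above are admission-time housekeeping: before the replacement marketplace-operator pod is admitted, the CPU and memory managers purge per-container resource assignments still recorded for the pods just deleted (the E-level severity is cosmetic; stale-state removal is the normal path here). Conceptually the state is a map keyed by pod UID and container name; a rough sketch of that bookkeeping with assumed types, not the kubelet's actual ones:

package cpustate

import "sync"

// key matches the podUID/containerName pair each log entry names.
type key struct{ podUID, container string }

// StateMem is a stand-in for the kubelet's in-memory CPU assignment state.
type StateMem struct {
	mu          sync.Mutex
	assignments map[key]string // e.g. {podUID, "registry-server"} -> "0-3"
}

// DeleteAssignment is what "Deleted CPUSet assignment" amounts to:
// dropping the stale entry so admission starts from a clean slate.
func (s *StateMem) DeleteAssignment(podUID, container string) {
	s.mu.Lock()
	defer s.mu.Unlock()
	delete(s.assignments, key{podUID, container})
}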
Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.435461 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2kxfd"]
Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.565426 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5214b35a-0948-41f6-b2d1-0dfc43009812-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-2kxfd\" (UID: \"5214b35a-0948-41f6-b2d1-0dfc43009812\") " pod="openshift-marketplace/marketplace-operator-79b997595-2kxfd"
Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.565498 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5214b35a-0948-41f6-b2d1-0dfc43009812-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-2kxfd\" (UID: \"5214b35a-0948-41f6-b2d1-0dfc43009812\") " pod="openshift-marketplace/marketplace-operator-79b997595-2kxfd"
Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.565745 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h42mq\" (UniqueName: \"kubernetes.io/projected/5214b35a-0948-41f6-b2d1-0dfc43009812-kube-api-access-h42mq\") pod \"marketplace-operator-79b997595-2kxfd\" (UID: \"5214b35a-0948-41f6-b2d1-0dfc43009812\") " pod="openshift-marketplace/marketplace-operator-79b997595-2kxfd"
Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.666695 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h42mq\" (UniqueName: \"kubernetes.io/projected/5214b35a-0948-41f6-b2d1-0dfc43009812-kube-api-access-h42mq\") pod \"marketplace-operator-79b997595-2kxfd\" (UID: \"5214b35a-0948-41f6-b2d1-0dfc43009812\") " pod="openshift-marketplace/marketplace-operator-79b997595-2kxfd"
Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.667064 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5214b35a-0948-41f6-b2d1-0dfc43009812-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-2kxfd\" (UID: \"5214b35a-0948-41f6-b2d1-0dfc43009812\") " pod="openshift-marketplace/marketplace-operator-79b997595-2kxfd"
Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.667130 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5214b35a-0948-41f6-b2d1-0dfc43009812-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-2kxfd\" (UID: \"5214b35a-0948-41f6-b2d1-0dfc43009812\") " pod="openshift-marketplace/marketplace-operator-79b997595-2kxfd"
Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.670201 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5214b35a-0948-41f6-b2d1-0dfc43009812-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-2kxfd\" (UID: \"5214b35a-0948-41f6-b2d1-0dfc43009812\") " pod="openshift-marketplace/marketplace-operator-79b997595-2kxfd"
Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.680466 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5214b35a-0948-41f6-b2d1-0dfc43009812-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-2kxfd\" (UID: \"5214b35a-0948-41f6-b2d1-0dfc43009812\") " pod="openshift-marketplace/marketplace-operator-79b997595-2kxfd"
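[Annotation] The mount sequence above covers the new pod's three volume types: a configMap (marketplace-trusted-ca), a secret (marketplace-operator-metrics), and the projected service-account token volume (kube-api-access-h42mq). A sketch of volume stanzas that would produce this trio, written against k8s.io/api/core/v1; the projected kube-api-access-* volume is normally injected by the API server and is spelled out here only to make its shape explicit:

package podspec

import (
	corev1 "k8s.io/api/core/v1"
)

// Volumes reconstructs the three volume types mounted in the entries above.
var Volumes = []corev1.Volume{
	{
		Name: "marketplace-trusted-ca",
		VolumeSource: corev1.VolumeSource{
			ConfigMap: &corev1.ConfigMapVolumeSource{
				LocalObjectReference: corev1.LocalObjectReference{Name: "marketplace-trusted-ca"},
			},
		},
	},
	{
		Name: "marketplace-operator-metrics",
		VolumeSource: corev1.VolumeSource{
			Secret: &corev1.SecretVolumeSource{SecretName: "marketplace-operator-metrics"},
		},
	},
	{
		// Normally auto-injected; shown for illustration only.
		Name: "kube-api-access-h42mq",
		VolumeSource: corev1.VolumeSource{
			Projected: &corev1.ProjectedVolumeSource{
				Sources: []corev1.VolumeProjection{
					{ServiceAccountToken: &corev1.ServiceAccountTokenProjection{Path: "token"}},
				},
			},
		},
	},
}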
\"kubernetes.io/secret/5214b35a-0948-41f6-b2d1-0dfc43009812-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-2kxfd\" (UID: \"5214b35a-0948-41f6-b2d1-0dfc43009812\") " pod="openshift-marketplace/marketplace-operator-79b997595-2kxfd" Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.685945 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h42mq\" (UniqueName: \"kubernetes.io/projected/5214b35a-0948-41f6-b2d1-0dfc43009812-kube-api-access-h42mq\") pod \"marketplace-operator-79b997595-2kxfd\" (UID: \"5214b35a-0948-41f6-b2d1-0dfc43009812\") " pod="openshift-marketplace/marketplace-operator-79b997595-2kxfd" Feb 02 10:59:47 crc kubenswrapper[4838]: E0202 10:59:47.705042 4838 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1ccf99eff81a9257eb4c7e3fe871e1b87b43262f9d7034992255a0f579ebe3cf is running failed: container process not found" containerID="1ccf99eff81a9257eb4c7e3fe871e1b87b43262f9d7034992255a0f579ebe3cf" cmd=["grpc_health_probe","-addr=:50051"] Feb 02 10:59:47 crc kubenswrapper[4838]: E0202 10:59:47.708862 4838 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1ccf99eff81a9257eb4c7e3fe871e1b87b43262f9d7034992255a0f579ebe3cf is running failed: container process not found" containerID="1ccf99eff81a9257eb4c7e3fe871e1b87b43262f9d7034992255a0f579ebe3cf" cmd=["grpc_health_probe","-addr=:50051"] Feb 02 10:59:47 crc kubenswrapper[4838]: E0202 10:59:47.709312 4838 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1ccf99eff81a9257eb4c7e3fe871e1b87b43262f9d7034992255a0f579ebe3cf is running failed: container process not found" containerID="1ccf99eff81a9257eb4c7e3fe871e1b87b43262f9d7034992255a0f579ebe3cf" cmd=["grpc_health_probe","-addr=:50051"] Feb 02 10:59:47 crc kubenswrapper[4838]: E0202 10:59:47.709396 4838 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1ccf99eff81a9257eb4c7e3fe871e1b87b43262f9d7034992255a0f579ebe3cf is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-pl5pl" podUID="193925e0-1419-444b-9e75-ed7371081181" containerName="registry-server" Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.840759 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-2kxfd" Feb 02 10:59:47 crc kubenswrapper[4838]: I0202 10:59:47.940714 4838 util.go:48] "No ready sandbox for pod can be found. 
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.074975 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cdb7ade3-e1b6-436b-a5df-3abb972b72fa-catalog-content\") pod \"cdb7ade3-e1b6-436b-a5df-3abb972b72fa\" (UID: \"cdb7ade3-e1b6-436b-a5df-3abb972b72fa\") "
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.075049 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cdb7ade3-e1b6-436b-a5df-3abb972b72fa-utilities\") pod \"cdb7ade3-e1b6-436b-a5df-3abb972b72fa\" (UID: \"cdb7ade3-e1b6-436b-a5df-3abb972b72fa\") "
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.075087 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4fv8\" (UniqueName: \"kubernetes.io/projected/cdb7ade3-e1b6-436b-a5df-3abb972b72fa-kube-api-access-h4fv8\") pod \"cdb7ade3-e1b6-436b-a5df-3abb972b72fa\" (UID: \"cdb7ade3-e1b6-436b-a5df-3abb972b72fa\") "
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.076663 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cdb7ade3-e1b6-436b-a5df-3abb972b72fa-utilities" (OuterVolumeSpecName: "utilities") pod "cdb7ade3-e1b6-436b-a5df-3abb972b72fa" (UID: "cdb7ade3-e1b6-436b-a5df-3abb972b72fa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.088796 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdb7ade3-e1b6-436b-a5df-3abb972b72fa-kube-api-access-h4fv8" (OuterVolumeSpecName: "kube-api-access-h4fv8") pod "cdb7ade3-e1b6-436b-a5df-3abb972b72fa" (UID: "cdb7ade3-e1b6-436b-a5df-3abb972b72fa"). InnerVolumeSpecName "kube-api-access-h4fv8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.143698 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cdb7ade3-e1b6-436b-a5df-3abb972b72fa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cdb7ade3-e1b6-436b-a5df-3abb972b72fa" (UID: "cdb7ade3-e1b6-436b-a5df-3abb972b72fa"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.176384 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cdb7ade3-e1b6-436b-a5df-3abb972b72fa-utilities\") on node \"crc\" DevicePath \"\""
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.176413 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4fv8\" (UniqueName: \"kubernetes.io/projected/cdb7ade3-e1b6-436b-a5df-3abb972b72fa-kube-api-access-h4fv8\") on node \"crc\" DevicePath \"\""
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.176425 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cdb7ade3-e1b6-436b-a5df-3abb972b72fa-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.234036 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2kxfd"]
Feb 02 10:59:48 crc kubenswrapper[4838]: W0202 10:59:48.243744 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5214b35a_0948_41f6_b2d1_0dfc43009812.slice/crio-2f62f70966a2d190ffc2c8d0408bd52600e545e9d2cb921ff6ea2a404ac23838 WatchSource:0}: Error finding container 2f62f70966a2d190ffc2c8d0408bd52600e545e9d2cb921ff6ea2a404ac23838: Status 404 returned error can't find the container with id 2f62f70966a2d190ffc2c8d0408bd52600e545e9d2cb921ff6ea2a404ac23838
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.494703 4838 generic.go:334] "Generic (PLEG): container finished" podID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" containerID="e3c356ae730bee9d068384a6a605b708810afff935b31f6cfb83a350d9effaa5" exitCode=0
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.494755 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tnscg" event={"ID":"bf0d00a9-64a4-46b9-9bc7-8617b0f3692a","Type":"ContainerDied","Data":"e3c356ae730bee9d068384a6a605b708810afff935b31f6cfb83a350d9effaa5"}
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.498411 4838 generic.go:334] "Generic (PLEG): container finished" podID="d7b6d22c-5441-4f5c-830b-17d67446352d" containerID="eab2da227dc61318787dee5786dede55b7ba45d35e1fa080d6ebb36b8f39b2f3" exitCode=0
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.498450 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ntc2n" event={"ID":"d7b6d22c-5441-4f5c-830b-17d67446352d","Type":"ContainerDied","Data":"eab2da227dc61318787dee5786dede55b7ba45d35e1fa080d6ebb36b8f39b2f3"}
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.501246 4838 generic.go:334] "Generic (PLEG): container finished" podID="193925e0-1419-444b-9e75-ed7371081181" containerID="1ccf99eff81a9257eb4c7e3fe871e1b87b43262f9d7034992255a0f579ebe3cf" exitCode=0
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.501289 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pl5pl" event={"ID":"193925e0-1419-444b-9e75-ed7371081181","Type":"ContainerDied","Data":"1ccf99eff81a9257eb4c7e3fe871e1b87b43262f9d7034992255a0f579ebe3cf"}
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.503831 4838 generic.go:334] "Generic (PLEG): container finished" podID="cdb7ade3-e1b6-436b-a5df-3abb972b72fa" containerID="087fedd9713829e266f735e578f7a5c5329112bf55b6288ef5410d669a789c2c" exitCode=0
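[Annotation] Each "SyncLoop (PLEG): event for pod" entry above carries a compact JSON payload: ID is the pod UID, Type the lifecycle transition (ContainerStarted/ContainerDied), and Data the container or sandbox ID. When post-processing a log like this one, that payload can be decoded directly; a small sketch with an assumed struct shape matching the fields printed above:

package main

import (
	"encoding/json"
	"fmt"
)

// PodLifecycleEvent mirrors the payload printed by the PLEG entries above.
type PodLifecycleEvent struct {
	ID   string `json:"ID"`   // pod UID
	Type string `json:"Type"` // e.g. ContainerStarted, ContainerDied
	Data string `json:"Data"` // container or sandbox ID
}

func main() {
	raw := `{"ID":"bf0d00a9-64a4-46b9-9bc7-8617b0f3692a","Type":"ContainerDied","Data":"e3c356ae730bee9d068384a6a605b708810afff935b31f6cfb83a350d9effaa5"}`
	var ev PodLifecycleEvent
	if err := json.Unmarshal([]byte(raw), &ev); err != nil {
		panic(err)
	}
	fmt.Printf("pod %s: %s (%s...)\n", ev.ID, ev.Type, ev.Data[:12])
}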
containerID="087fedd9713829e266f735e578f7a5c5329112bf55b6288ef5410d669a789c2c" exitCode=0 Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.503863 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4f9j" event={"ID":"cdb7ade3-e1b6-436b-a5df-3abb972b72fa","Type":"ContainerDied","Data":"087fedd9713829e266f735e578f7a5c5329112bf55b6288ef5410d669a789c2c"} Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.503877 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4f9j" event={"ID":"cdb7ade3-e1b6-436b-a5df-3abb972b72fa","Type":"ContainerDied","Data":"bbb39d9aede0fdf1ec3e8965106b2d1d950461f5702c54ff1f8badf0f273aac3"} Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.503891 4838 scope.go:117] "RemoveContainer" containerID="087fedd9713829e266f735e578f7a5c5329112bf55b6288ef5410d669a789c2c" Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.503976 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m4f9j" Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.514668 4838 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-2kxfd container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.67:8080/healthz\": dial tcp 10.217.0.67:8080: connect: connection refused" start-of-body= Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.514709 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-2kxfd" podUID="5214b35a-0948-41f6-b2d1-0dfc43009812" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.67:8080/healthz\": dial tcp 10.217.0.67:8080: connect: connection refused" Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.514757 4838 generic.go:334] "Generic (PLEG): container finished" podID="3bffe14a-0216-4854-b0fc-7c482a297b82" containerID="010ec0a466638dfb9946541c87b10692e080aa0356ad1355b5835782286cc670" exitCode=0 Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.514823 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-2kxfd" Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.514847 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-2kxfd" event={"ID":"5214b35a-0948-41f6-b2d1-0dfc43009812","Type":"ContainerStarted","Data":"f86e24c64649acea378eb843c91b85b3f86321eb9f029f27f5959e8fb80d5d06"} Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.514860 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-2kxfd" event={"ID":"5214b35a-0948-41f6-b2d1-0dfc43009812","Type":"ContainerStarted","Data":"2f62f70966a2d190ffc2c8d0408bd52600e545e9d2cb921ff6ea2a404ac23838"} Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.514869 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-s96vn" event={"ID":"3bffe14a-0216-4854-b0fc-7c482a297b82","Type":"ContainerDied","Data":"010ec0a466638dfb9946541c87b10692e080aa0356ad1355b5835782286cc670"} Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.556379 4838 scope.go:117] "RemoveContainer" containerID="7e87aa44dc34b117da005ccbe406770c8e9232b16085f5cd591430268174549b" Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.619975 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-s96vn"
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.620889 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-2kxfd" podStartSLOduration=1.620849564 podStartE2EDuration="1.620849564s" podCreationTimestamp="2026-02-02 10:59:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 10:59:48.545954128 +0000 UTC m=+382.883055166" watchObservedRunningTime="2026-02-02 10:59:48.620849564 +0000 UTC m=+382.957950582"
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.624115 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m4f9j"]
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.626195 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-m4f9j"]
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.633803 4838 scope.go:117] "RemoveContainer" containerID="087fedd9713829e266f735e578f7a5c5329112bf55b6288ef5410d669a789c2c"
Feb 02 10:59:48 crc kubenswrapper[4838]: E0202 10:59:48.634285 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"087fedd9713829e266f735e578f7a5c5329112bf55b6288ef5410d669a789c2c\": container with ID starting with 087fedd9713829e266f735e578f7a5c5329112bf55b6288ef5410d669a789c2c not found: ID does not exist" containerID="087fedd9713829e266f735e578f7a5c5329112bf55b6288ef5410d669a789c2c"
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.634326 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"087fedd9713829e266f735e578f7a5c5329112bf55b6288ef5410d669a789c2c"} err="failed to get container status \"087fedd9713829e266f735e578f7a5c5329112bf55b6288ef5410d669a789c2c\": rpc error: code = NotFound desc = could not find container \"087fedd9713829e266f735e578f7a5c5329112bf55b6288ef5410d669a789c2c\": container with ID starting with 087fedd9713829e266f735e578f7a5c5329112bf55b6288ef5410d669a789c2c not found: ID does not exist"
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.634349 4838 scope.go:117] "RemoveContainer" containerID="7e87aa44dc34b117da005ccbe406770c8e9232b16085f5cd591430268174549b"
Feb 02 10:59:48 crc kubenswrapper[4838]: E0202 10:59:48.634778 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e87aa44dc34b117da005ccbe406770c8e9232b16085f5cd591430268174549b\": container with ID starting with 7e87aa44dc34b117da005ccbe406770c8e9232b16085f5cd591430268174549b not found: ID does not exist" containerID="7e87aa44dc34b117da005ccbe406770c8e9232b16085f5cd591430268174549b"
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.634814 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e87aa44dc34b117da005ccbe406770c8e9232b16085f5cd591430268174549b"} err="failed to get container status \"7e87aa44dc34b117da005ccbe406770c8e9232b16085f5cd591430268174549b\": rpc error: code = NotFound desc = could not find container \"7e87aa44dc34b117da005ccbe406770c8e9232b16085f5cd591430268174549b\": container with ID starting with 7e87aa44dc34b117da005ccbe406770c8e9232b16085f5cd591430268174549b not found: ID does not exist"
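[Annotation] The "Observed pod startup duration" entry above reports podStartSLOduration = 1.620849564, which matches watchObservedRunningTime (10:59:48.620849564) minus podCreationTimestamp (10:59:47); the pull timestamps are the zero value because no image pull was needed. The arithmetic can be rechecked directly from the two printed timestamps:

package main

import (
	"fmt"
	"time"
)

// Recomputes podStartSLOduration from the two timestamps in the entry above:
// watchObservedRunningTime - podCreationTimestamp.
func main() {
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, err := time.Parse(layout, "2026-02-02 10:59:47 +0000 UTC")
	if err != nil {
		panic(err)
	}
	observed, err := time.Parse(layout, "2026-02-02 10:59:48.620849564 +0000 UTC")
	if err != nil {
		panic(err)
	}
	fmt.Println(observed.Sub(created)) // 1.620849564s, matching the log
}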
\"7e87aa44dc34b117da005ccbe406770c8e9232b16085f5cd591430268174549b\": container with ID starting with 7e87aa44dc34b117da005ccbe406770c8e9232b16085f5cd591430268174549b not found: ID does not exist" Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.634834 4838 scope.go:117] "RemoveContainer" containerID="64440857b2f78a077748d02ca562e0b1d843cf42acb620d2c48b15396ebe0672" Feb 02 10:59:48 crc kubenswrapper[4838]: E0202 10:59:48.635271 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64440857b2f78a077748d02ca562e0b1d843cf42acb620d2c48b15396ebe0672\": container with ID starting with 64440857b2f78a077748d02ca562e0b1d843cf42acb620d2c48b15396ebe0672 not found: ID does not exist" containerID="64440857b2f78a077748d02ca562e0b1d843cf42acb620d2c48b15396ebe0672" Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.635298 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64440857b2f78a077748d02ca562e0b1d843cf42acb620d2c48b15396ebe0672"} err="failed to get container status \"64440857b2f78a077748d02ca562e0b1d843cf42acb620d2c48b15396ebe0672\": rpc error: code = NotFound desc = could not find container \"64440857b2f78a077748d02ca562e0b1d843cf42acb620d2c48b15396ebe0672\": container with ID starting with 64440857b2f78a077748d02ca562e0b1d843cf42acb620d2c48b15396ebe0672 not found: ID does not exist" Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.635313 4838 scope.go:117] "RemoveContainer" containerID="4bcd58e413a776a9641083c37d859395b5d0a24ade0d69ad3cc133dd07cd6896" Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.668181 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tnscg" Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.676698 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pl5pl" Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.699563 4838 util.go:48] "No ready sandbox for pod can be found. 
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.787805 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/193925e0-1419-444b-9e75-ed7371081181-utilities\") pod \"193925e0-1419-444b-9e75-ed7371081181\" (UID: \"193925e0-1419-444b-9e75-ed7371081181\") "
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.787868 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf0d00a9-64a4-46b9-9bc7-8617b0f3692a-catalog-content\") pod \"bf0d00a9-64a4-46b9-9bc7-8617b0f3692a\" (UID: \"bf0d00a9-64a4-46b9-9bc7-8617b0f3692a\") "
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.787897 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5gk2l\" (UniqueName: \"kubernetes.io/projected/bf0d00a9-64a4-46b9-9bc7-8617b0f3692a-kube-api-access-5gk2l\") pod \"bf0d00a9-64a4-46b9-9bc7-8617b0f3692a\" (UID: \"bf0d00a9-64a4-46b9-9bc7-8617b0f3692a\") "
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.787951 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/193925e0-1419-444b-9e75-ed7371081181-catalog-content\") pod \"193925e0-1419-444b-9e75-ed7371081181\" (UID: \"193925e0-1419-444b-9e75-ed7371081181\") "
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.787995 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3bffe14a-0216-4854-b0fc-7c482a297b82-marketplace-operator-metrics\") pod \"3bffe14a-0216-4854-b0fc-7c482a297b82\" (UID: \"3bffe14a-0216-4854-b0fc-7c482a297b82\") "
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.788510 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/193925e0-1419-444b-9e75-ed7371081181-utilities" (OuterVolumeSpecName: "utilities") pod "193925e0-1419-444b-9e75-ed7371081181" (UID: "193925e0-1419-444b-9e75-ed7371081181"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.788046 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-487wm\" (UniqueName: \"kubernetes.io/projected/193925e0-1419-444b-9e75-ed7371081181-kube-api-access-487wm\") pod \"193925e0-1419-444b-9e75-ed7371081181\" (UID: \"193925e0-1419-444b-9e75-ed7371081181\") "
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.788785 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3bffe14a-0216-4854-b0fc-7c482a297b82-marketplace-trusted-ca\") pod \"3bffe14a-0216-4854-b0fc-7c482a297b82\" (UID: \"3bffe14a-0216-4854-b0fc-7c482a297b82\") "
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.788875 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf0d00a9-64a4-46b9-9bc7-8617b0f3692a-utilities\") pod \"bf0d00a9-64a4-46b9-9bc7-8617b0f3692a\" (UID: \"bf0d00a9-64a4-46b9-9bc7-8617b0f3692a\") "
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.788918 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n78p9\" (UniqueName: \"kubernetes.io/projected/3bffe14a-0216-4854-b0fc-7c482a297b82-kube-api-access-n78p9\") pod \"3bffe14a-0216-4854-b0fc-7c482a297b82\" (UID: \"3bffe14a-0216-4854-b0fc-7c482a297b82\") "
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.789138 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3bffe14a-0216-4854-b0fc-7c482a297b82-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "3bffe14a-0216-4854-b0fc-7c482a297b82" (UID: "3bffe14a-0216-4854-b0fc-7c482a297b82"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.789224 4838 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3bffe14a-0216-4854-b0fc-7c482a297b82-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.789241 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/193925e0-1419-444b-9e75-ed7371081181-utilities\") on node \"crc\" DevicePath \"\""
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.789726 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf0d00a9-64a4-46b9-9bc7-8617b0f3692a-utilities" (OuterVolumeSpecName: "utilities") pod "bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" (UID: "bf0d00a9-64a4-46b9-9bc7-8617b0f3692a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.792061 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/193925e0-1419-444b-9e75-ed7371081181-kube-api-access-487wm" (OuterVolumeSpecName: "kube-api-access-487wm") pod "193925e0-1419-444b-9e75-ed7371081181" (UID: "193925e0-1419-444b-9e75-ed7371081181"). InnerVolumeSpecName "kube-api-access-487wm". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.792227 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3bffe14a-0216-4854-b0fc-7c482a297b82-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "3bffe14a-0216-4854-b0fc-7c482a297b82" (UID: "3bffe14a-0216-4854-b0fc-7c482a297b82"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.794000 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3bffe14a-0216-4854-b0fc-7c482a297b82-kube-api-access-n78p9" (OuterVolumeSpecName: "kube-api-access-n78p9") pod "3bffe14a-0216-4854-b0fc-7c482a297b82" (UID: "3bffe14a-0216-4854-b0fc-7c482a297b82"). InnerVolumeSpecName "kube-api-access-n78p9". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.797412 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf0d00a9-64a4-46b9-9bc7-8617b0f3692a-kube-api-access-5gk2l" (OuterVolumeSpecName: "kube-api-access-5gk2l") pod "bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" (UID: "bf0d00a9-64a4-46b9-9bc7-8617b0f3692a"). InnerVolumeSpecName "kube-api-access-5gk2l". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.824117 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf0d00a9-64a4-46b9-9bc7-8617b0f3692a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" (UID: "bf0d00a9-64a4-46b9-9bc7-8617b0f3692a"). InnerVolumeSpecName "catalog-content". 
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.890306 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7b6d22c-5441-4f5c-830b-17d67446352d-utilities\") pod \"d7b6d22c-5441-4f5c-830b-17d67446352d\" (UID: \"d7b6d22c-5441-4f5c-830b-17d67446352d\") "
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.890369 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7b6d22c-5441-4f5c-830b-17d67446352d-catalog-content\") pod \"d7b6d22c-5441-4f5c-830b-17d67446352d\" (UID: \"d7b6d22c-5441-4f5c-830b-17d67446352d\") "
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.890480 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6sqx\" (UniqueName: \"kubernetes.io/projected/d7b6d22c-5441-4f5c-830b-17d67446352d-kube-api-access-l6sqx\") pod \"d7b6d22c-5441-4f5c-830b-17d67446352d\" (UID: \"d7b6d22c-5441-4f5c-830b-17d67446352d\") "
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.892153 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf0d00a9-64a4-46b9-9bc7-8617b0f3692a-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.892186 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5gk2l\" (UniqueName: \"kubernetes.io/projected/bf0d00a9-64a4-46b9-9bc7-8617b0f3692a-kube-api-access-5gk2l\") on node \"crc\" DevicePath \"\""
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.892198 4838 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3bffe14a-0216-4854-b0fc-7c482a297b82-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.892208 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-487wm\" (UniqueName: \"kubernetes.io/projected/193925e0-1419-444b-9e75-ed7371081181-kube-api-access-487wm\") on node \"crc\" DevicePath \"\""
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.892221 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf0d00a9-64a4-46b9-9bc7-8617b0f3692a-utilities\") on node \"crc\" DevicePath \"\""
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.892230 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n78p9\" (UniqueName: \"kubernetes.io/projected/3bffe14a-0216-4854-b0fc-7c482a297b82-kube-api-access-n78p9\") on node \"crc\" DevicePath \"\""
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.893090 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7b6d22c-5441-4f5c-830b-17d67446352d-utilities" (OuterVolumeSpecName: "utilities") pod "d7b6d22c-5441-4f5c-830b-17d67446352d" (UID: "d7b6d22c-5441-4f5c-830b-17d67446352d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.900701 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7b6d22c-5441-4f5c-830b-17d67446352d-kube-api-access-l6sqx" (OuterVolumeSpecName: "kube-api-access-l6sqx") pod "d7b6d22c-5441-4f5c-830b-17d67446352d" (UID: "d7b6d22c-5441-4f5c-830b-17d67446352d"). InnerVolumeSpecName "kube-api-access-l6sqx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.930546 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/193925e0-1419-444b-9e75-ed7371081181-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "193925e0-1419-444b-9e75-ed7371081181" (UID: "193925e0-1419-444b-9e75-ed7371081181"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.949781 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7b6d22c-5441-4f5c-830b-17d67446352d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d7b6d22c-5441-4f5c-830b-17d67446352d" (UID: "d7b6d22c-5441-4f5c-830b-17d67446352d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.993012 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/193925e0-1419-444b-9e75-ed7371081181-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.993044 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6sqx\" (UniqueName: \"kubernetes.io/projected/d7b6d22c-5441-4f5c-830b-17d67446352d-kube-api-access-l6sqx\") on node \"crc\" DevicePath \"\""
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.993057 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7b6d22c-5441-4f5c-830b-17d67446352d-utilities\") on node \"crc\" DevicePath \"\""
Feb 02 10:59:48 crc kubenswrapper[4838]: I0202 10:59:48.993065 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7b6d22c-5441-4f5c-830b-17d67446352d-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.524758 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-s96vn" event={"ID":"3bffe14a-0216-4854-b0fc-7c482a297b82","Type":"ContainerDied","Data":"68c830bee30a244b7319819ac9568d97f6dfb2449661089c4f87cc471140577e"}
Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.524776 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-s96vn"
Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.524816 4838 scope.go:117] "RemoveContainer" containerID="010ec0a466638dfb9946541c87b10692e080aa0356ad1355b5835782286cc670"
Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.527764 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tnscg" event={"ID":"bf0d00a9-64a4-46b9-9bc7-8617b0f3692a","Type":"ContainerDied","Data":"f122af82990d359563cf6cb5a392a9a1d5bf3f3456bb88e8a521523164e10e4b"}
Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.527836 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tnscg"
Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.529985 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ntc2n" event={"ID":"d7b6d22c-5441-4f5c-830b-17d67446352d","Type":"ContainerDied","Data":"80492cab5db11167cdeca622da236853af59b6e2c7a2d93c3636c18a275831ba"}
Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.530029 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ntc2n"
Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.533453 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pl5pl"
Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.533833 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pl5pl" event={"ID":"193925e0-1419-444b-9e75-ed7371081181","Type":"ContainerDied","Data":"cb0fd35d05f1c52119d9e23e4cfa190b26a8d7fdb76b2626e610cd8ddcb3611a"}
Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.536207 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-2kxfd"
Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.543561 4838 scope.go:117] "RemoveContainer" containerID="e3c356ae730bee9d068384a6a605b708810afff935b31f6cfb83a350d9effaa5"
Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.563914 4838 scope.go:117] "RemoveContainer" containerID="7dcab8d17ec0136d70c2085a28f0bedeb6c35bda3dd71c47db3df4d1b252220e"
Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.600386 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pl5pl"]
Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.603876 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pl5pl"]
Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.608164 4838 scope.go:117] "RemoveContainer" containerID="64f1fdb1daae58afc50ff16c35dd52e00b236820a98593f719611df13be07151"
Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.618576 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-s96vn"]
Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.621683 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-s96vn"]
Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.635370 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ntc2n"]
Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.636765 4838 scope.go:117] "RemoveContainer" containerID="eab2da227dc61318787dee5786dede55b7ba45d35e1fa080d6ebb36b8f39b2f3"
containerID="eab2da227dc61318787dee5786dede55b7ba45d35e1fa080d6ebb36b8f39b2f3" Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.640791 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ntc2n"] Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.655657 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tnscg"] Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.656588 4838 scope.go:117] "RemoveContainer" containerID="67c288d5fe396bb70a6fc6d7ff09a745c878afe646b0cf0f55d69404465cac8e" Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.658974 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tnscg"] Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.676956 4838 scope.go:117] "RemoveContainer" containerID="ef5c2acb191b6d35c0620e7e5eb83d390271f42c47881fb33c65b1f6892a0c8c" Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.688297 4838 scope.go:117] "RemoveContainer" containerID="1ccf99eff81a9257eb4c7e3fe871e1b87b43262f9d7034992255a0f579ebe3cf" Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.704848 4838 scope.go:117] "RemoveContainer" containerID="a5b683382415401dafe8670fcc9267e53809e00f5090c75867acf5f19006ce41" Feb 02 10:59:49 crc kubenswrapper[4838]: I0202 10:59:49.720179 4838 scope.go:117] "RemoveContainer" containerID="ab6cac03f617f2fdb0b4417bb05817eee8926f6fd951af91f6ce45b4e4aeca88" Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.410512 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tdj7s"] Feb 02 10:59:50 crc kubenswrapper[4838]: E0202 10:59:50.411003 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdb7ade3-e1b6-436b-a5df-3abb972b72fa" containerName="extract-content" Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.411020 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdb7ade3-e1b6-436b-a5df-3abb972b72fa" containerName="extract-content" Feb 02 10:59:50 crc kubenswrapper[4838]: E0202 10:59:50.411029 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" containerName="extract-content" Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.411036 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" containerName="extract-content" Feb 02 10:59:50 crc kubenswrapper[4838]: E0202 10:59:50.411044 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7b6d22c-5441-4f5c-830b-17d67446352d" containerName="extract-utilities" Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.411051 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7b6d22c-5441-4f5c-830b-17d67446352d" containerName="extract-utilities" Feb 02 10:59:50 crc kubenswrapper[4838]: E0202 10:59:50.411060 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bffe14a-0216-4854-b0fc-7c482a297b82" containerName="marketplace-operator" Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.411066 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bffe14a-0216-4854-b0fc-7c482a297b82" containerName="marketplace-operator" Feb 02 10:59:50 crc kubenswrapper[4838]: E0202 10:59:50.411073 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdb7ade3-e1b6-436b-a5df-3abb972b72fa" containerName="extract-utilities" Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.411079 4838 
Feb 02 10:59:50 crc kubenswrapper[4838]: E0202 10:59:50.411090 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="193925e0-1419-444b-9e75-ed7371081181" containerName="registry-server"
Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.411097 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="193925e0-1419-444b-9e75-ed7371081181" containerName="registry-server"
Feb 02 10:59:50 crc kubenswrapper[4838]: E0202 10:59:50.411106 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" containerName="extract-utilities"
Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.411112 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" containerName="extract-utilities"
Feb 02 10:59:50 crc kubenswrapper[4838]: E0202 10:59:50.411119 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="193925e0-1419-444b-9e75-ed7371081181" containerName="extract-content"
Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.411124 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="193925e0-1419-444b-9e75-ed7371081181" containerName="extract-content"
Feb 02 10:59:50 crc kubenswrapper[4838]: E0202 10:59:50.411135 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7b6d22c-5441-4f5c-830b-17d67446352d" containerName="extract-content"
Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.411140 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7b6d22c-5441-4f5c-830b-17d67446352d" containerName="extract-content"
Feb 02 10:59:50 crc kubenswrapper[4838]: E0202 10:59:50.411148 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7b6d22c-5441-4f5c-830b-17d67446352d" containerName="registry-server"
Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.411154 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7b6d22c-5441-4f5c-830b-17d67446352d" containerName="registry-server"
Feb 02 10:59:50 crc kubenswrapper[4838]: E0202 10:59:50.411166 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" containerName="registry-server"
Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.411174 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" containerName="registry-server"
Feb 02 10:59:50 crc kubenswrapper[4838]: E0202 10:59:50.411183 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdb7ade3-e1b6-436b-a5df-3abb972b72fa" containerName="registry-server"
Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.411189 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdb7ade3-e1b6-436b-a5df-3abb972b72fa" containerName="registry-server"
Feb 02 10:59:50 crc kubenswrapper[4838]: E0202 10:59:50.411196 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="193925e0-1419-444b-9e75-ed7371081181" containerName="extract-utilities"
Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.411202 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="193925e0-1419-444b-9e75-ed7371081181" containerName="extract-utilities"
Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.411284 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdb7ade3-e1b6-436b-a5df-3abb972b72fa" containerName="registry-server"
Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.411295 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" containerName="registry-server"
Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.411302 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bffe14a-0216-4854-b0fc-7c482a297b82" containerName="marketplace-operator"
Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.411311 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="193925e0-1419-444b-9e75-ed7371081181" containerName="registry-server"
Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.411321 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7b6d22c-5441-4f5c-830b-17d67446352d" containerName="registry-server"
Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.411329 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bffe14a-0216-4854-b0fc-7c482a297b82" containerName="marketplace-operator"
Feb 02 10:59:50 crc kubenswrapper[4838]: E0202 10:59:50.411408 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bffe14a-0216-4854-b0fc-7c482a297b82" containerName="marketplace-operator"
Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.411415 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bffe14a-0216-4854-b0fc-7c482a297b82" containerName="marketplace-operator"
Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.412035 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tdj7s"
Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.413962 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.415104 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tdj7s"]
Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.514703 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="193925e0-1419-444b-9e75-ed7371081181" path="/var/lib/kubelet/pods/193925e0-1419-444b-9e75-ed7371081181/volumes"
Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.515926 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3bffe14a-0216-4854-b0fc-7c482a297b82" path="/var/lib/kubelet/pods/3bffe14a-0216-4854-b0fc-7c482a297b82/volumes"
Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.516151 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6da618e6-95eb-478d-a290-fb44dfef06f7-catalog-content\") pod \"certified-operators-tdj7s\" (UID: \"6da618e6-95eb-478d-a290-fb44dfef06f7\") " pod="openshift-marketplace/certified-operators-tdj7s"
Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.516224 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6da618e6-95eb-478d-a290-fb44dfef06f7-utilities\") pod \"certified-operators-tdj7s\" (UID: \"6da618e6-95eb-478d-a290-fb44dfef06f7\") " pod="openshift-marketplace/certified-operators-tdj7s"
Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.516264 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bqk2\" (UniqueName: \"kubernetes.io/projected/6da618e6-95eb-478d-a290-fb44dfef06f7-kube-api-access-6bqk2\") pod \"certified-operators-tdj7s\" (UID: \"6da618e6-95eb-478d-a290-fb44dfef06f7\") " pod="openshift-marketplace/certified-operators-tdj7s"
\"kubernetes.io/projected/6da618e6-95eb-478d-a290-fb44dfef06f7-kube-api-access-6bqk2\") pod \"certified-operators-tdj7s\" (UID: \"6da618e6-95eb-478d-a290-fb44dfef06f7\") " pod="openshift-marketplace/certified-operators-tdj7s" Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.516595 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf0d00a9-64a4-46b9-9bc7-8617b0f3692a" path="/var/lib/kubelet/pods/bf0d00a9-64a4-46b9-9bc7-8617b0f3692a/volumes" Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.517989 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cdb7ade3-e1b6-436b-a5df-3abb972b72fa" path="/var/lib/kubelet/pods/cdb7ade3-e1b6-436b-a5df-3abb972b72fa/volumes" Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.518844 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7b6d22c-5441-4f5c-830b-17d67446352d" path="/var/lib/kubelet/pods/d7b6d22c-5441-4f5c-830b-17d67446352d/volumes" Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.617126 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6da618e6-95eb-478d-a290-fb44dfef06f7-catalog-content\") pod \"certified-operators-tdj7s\" (UID: \"6da618e6-95eb-478d-a290-fb44dfef06f7\") " pod="openshift-marketplace/certified-operators-tdj7s" Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.617184 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6da618e6-95eb-478d-a290-fb44dfef06f7-utilities\") pod \"certified-operators-tdj7s\" (UID: \"6da618e6-95eb-478d-a290-fb44dfef06f7\") " pod="openshift-marketplace/certified-operators-tdj7s" Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.617211 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bqk2\" (UniqueName: \"kubernetes.io/projected/6da618e6-95eb-478d-a290-fb44dfef06f7-kube-api-access-6bqk2\") pod \"certified-operators-tdj7s\" (UID: \"6da618e6-95eb-478d-a290-fb44dfef06f7\") " pod="openshift-marketplace/certified-operators-tdj7s" Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.617782 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6da618e6-95eb-478d-a290-fb44dfef06f7-catalog-content\") pod \"certified-operators-tdj7s\" (UID: \"6da618e6-95eb-478d-a290-fb44dfef06f7\") " pod="openshift-marketplace/certified-operators-tdj7s" Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.617957 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6da618e6-95eb-478d-a290-fb44dfef06f7-utilities\") pod \"certified-operators-tdj7s\" (UID: \"6da618e6-95eb-478d-a290-fb44dfef06f7\") " pod="openshift-marketplace/certified-operators-tdj7s" Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.634513 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bqk2\" (UniqueName: \"kubernetes.io/projected/6da618e6-95eb-478d-a290-fb44dfef06f7-kube-api-access-6bqk2\") pod \"certified-operators-tdj7s\" (UID: \"6da618e6-95eb-478d-a290-fb44dfef06f7\") " pod="openshift-marketplace/certified-operators-tdj7s" Feb 02 10:59:50 crc kubenswrapper[4838]: I0202 10:59:50.736078 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tdj7s" Feb 02 10:59:51 crc kubenswrapper[4838]: I0202 10:59:51.208240 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tdj7s"] Feb 02 10:59:51 crc kubenswrapper[4838]: W0202 10:59:51.216636 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6da618e6_95eb_478d_a290_fb44dfef06f7.slice/crio-af60370a95f663e60b6bd57342217f2bc06491491f8d5c053a2834ceb674d7d2 WatchSource:0}: Error finding container af60370a95f663e60b6bd57342217f2bc06491491f8d5c053a2834ceb674d7d2: Status 404 returned error can't find the container with id af60370a95f663e60b6bd57342217f2bc06491491f8d5c053a2834ceb674d7d2 Feb 02 10:59:51 crc kubenswrapper[4838]: I0202 10:59:51.549314 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tdj7s" event={"ID":"6da618e6-95eb-478d-a290-fb44dfef06f7","Type":"ContainerStarted","Data":"c97d11ae514128be43a04e839d5cfe1b69abbc6e355c8983059c2a99a7365800"} Feb 02 10:59:51 crc kubenswrapper[4838]: I0202 10:59:51.549725 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tdj7s" event={"ID":"6da618e6-95eb-478d-a290-fb44dfef06f7","Type":"ContainerStarted","Data":"af60370a95f663e60b6bd57342217f2bc06491491f8d5c053a2834ceb674d7d2"} Feb 02 10:59:51 crc kubenswrapper[4838]: I0202 10:59:51.810634 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-62q4t"] Feb 02 10:59:51 crc kubenswrapper[4838]: I0202 10:59:51.811729 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-62q4t" Feb 02 10:59:51 crc kubenswrapper[4838]: I0202 10:59:51.821063 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-62q4t"] Feb 02 10:59:51 crc kubenswrapper[4838]: I0202 10:59:51.821193 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Feb 02 10:59:51 crc kubenswrapper[4838]: I0202 10:59:51.931295 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71ec42fd-382d-43bd-9353-24e15ac2e795-utilities\") pod \"redhat-operators-62q4t\" (UID: \"71ec42fd-382d-43bd-9353-24e15ac2e795\") " pod="openshift-marketplace/redhat-operators-62q4t" Feb 02 10:59:51 crc kubenswrapper[4838]: I0202 10:59:51.931397 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whkcj\" (UniqueName: \"kubernetes.io/projected/71ec42fd-382d-43bd-9353-24e15ac2e795-kube-api-access-whkcj\") pod \"redhat-operators-62q4t\" (UID: \"71ec42fd-382d-43bd-9353-24e15ac2e795\") " pod="openshift-marketplace/redhat-operators-62q4t" Feb 02 10:59:51 crc kubenswrapper[4838]: I0202 10:59:51.931430 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71ec42fd-382d-43bd-9353-24e15ac2e795-catalog-content\") pod \"redhat-operators-62q4t\" (UID: \"71ec42fd-382d-43bd-9353-24e15ac2e795\") " pod="openshift-marketplace/redhat-operators-62q4t" Feb 02 10:59:52 crc kubenswrapper[4838]: I0202 10:59:52.033217 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whkcj\" 
(UniqueName: \"kubernetes.io/projected/71ec42fd-382d-43bd-9353-24e15ac2e795-kube-api-access-whkcj\") pod \"redhat-operators-62q4t\" (UID: \"71ec42fd-382d-43bd-9353-24e15ac2e795\") " pod="openshift-marketplace/redhat-operators-62q4t" Feb 02 10:59:52 crc kubenswrapper[4838]: I0202 10:59:52.033288 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71ec42fd-382d-43bd-9353-24e15ac2e795-catalog-content\") pod \"redhat-operators-62q4t\" (UID: \"71ec42fd-382d-43bd-9353-24e15ac2e795\") " pod="openshift-marketplace/redhat-operators-62q4t" Feb 02 10:59:52 crc kubenswrapper[4838]: I0202 10:59:52.033358 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71ec42fd-382d-43bd-9353-24e15ac2e795-utilities\") pod \"redhat-operators-62q4t\" (UID: \"71ec42fd-382d-43bd-9353-24e15ac2e795\") " pod="openshift-marketplace/redhat-operators-62q4t" Feb 02 10:59:52 crc kubenswrapper[4838]: I0202 10:59:52.033840 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71ec42fd-382d-43bd-9353-24e15ac2e795-utilities\") pod \"redhat-operators-62q4t\" (UID: \"71ec42fd-382d-43bd-9353-24e15ac2e795\") " pod="openshift-marketplace/redhat-operators-62q4t" Feb 02 10:59:52 crc kubenswrapper[4838]: I0202 10:59:52.034803 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71ec42fd-382d-43bd-9353-24e15ac2e795-catalog-content\") pod \"redhat-operators-62q4t\" (UID: \"71ec42fd-382d-43bd-9353-24e15ac2e795\") " pod="openshift-marketplace/redhat-operators-62q4t" Feb 02 10:59:52 crc kubenswrapper[4838]: I0202 10:59:52.059032 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whkcj\" (UniqueName: \"kubernetes.io/projected/71ec42fd-382d-43bd-9353-24e15ac2e795-kube-api-access-whkcj\") pod \"redhat-operators-62q4t\" (UID: \"71ec42fd-382d-43bd-9353-24e15ac2e795\") " pod="openshift-marketplace/redhat-operators-62q4t" Feb 02 10:59:52 crc kubenswrapper[4838]: I0202 10:59:52.127674 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-62q4t" Feb 02 10:59:52 crc kubenswrapper[4838]: I0202 10:59:52.519865 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-62q4t"] Feb 02 10:59:52 crc kubenswrapper[4838]: W0202 10:59:52.525452 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71ec42fd_382d_43bd_9353_24e15ac2e795.slice/crio-8f8133cfb3c6ab6410a0ef9be653074ae5373d4847e6441b7c904a6050c54972 WatchSource:0}: Error finding container 8f8133cfb3c6ab6410a0ef9be653074ae5373d4847e6441b7c904a6050c54972: Status 404 returned error can't find the container with id 8f8133cfb3c6ab6410a0ef9be653074ae5373d4847e6441b7c904a6050c54972 Feb 02 10:59:52 crc kubenswrapper[4838]: I0202 10:59:52.557567 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-62q4t" event={"ID":"71ec42fd-382d-43bd-9353-24e15ac2e795","Type":"ContainerStarted","Data":"8f8133cfb3c6ab6410a0ef9be653074ae5373d4847e6441b7c904a6050c54972"} Feb 02 10:59:52 crc kubenswrapper[4838]: I0202 10:59:52.559182 4838 generic.go:334] "Generic (PLEG): container finished" podID="6da618e6-95eb-478d-a290-fb44dfef06f7" containerID="c97d11ae514128be43a04e839d5cfe1b69abbc6e355c8983059c2a99a7365800" exitCode=0 Feb 02 10:59:52 crc kubenswrapper[4838]: I0202 10:59:52.559211 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tdj7s" event={"ID":"6da618e6-95eb-478d-a290-fb44dfef06f7","Type":"ContainerDied","Data":"c97d11ae514128be43a04e839d5cfe1b69abbc6e355c8983059c2a99a7365800"} Feb 02 10:59:52 crc kubenswrapper[4838]: I0202 10:59:52.805281 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dcrh2"] Feb 02 10:59:52 crc kubenswrapper[4838]: I0202 10:59:52.806248 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dcrh2" Feb 02 10:59:52 crc kubenswrapper[4838]: I0202 10:59:52.807766 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Feb 02 10:59:52 crc kubenswrapper[4838]: I0202 10:59:52.812597 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dcrh2"] Feb 02 10:59:52 crc kubenswrapper[4838]: I0202 10:59:52.942684 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d8db2f1-c01c-4848-923a-f4fb42f7d2be-utilities\") pod \"community-operators-dcrh2\" (UID: \"8d8db2f1-c01c-4848-923a-f4fb42f7d2be\") " pod="openshift-marketplace/community-operators-dcrh2" Feb 02 10:59:52 crc kubenswrapper[4838]: I0202 10:59:52.942864 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d8db2f1-c01c-4848-923a-f4fb42f7d2be-catalog-content\") pod \"community-operators-dcrh2\" (UID: \"8d8db2f1-c01c-4848-923a-f4fb42f7d2be\") " pod="openshift-marketplace/community-operators-dcrh2" Feb 02 10:59:52 crc kubenswrapper[4838]: I0202 10:59:52.942966 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75fhw\" (UniqueName: \"kubernetes.io/projected/8d8db2f1-c01c-4848-923a-f4fb42f7d2be-kube-api-access-75fhw\") pod \"community-operators-dcrh2\" (UID: \"8d8db2f1-c01c-4848-923a-f4fb42f7d2be\") " pod="openshift-marketplace/community-operators-dcrh2" Feb 02 10:59:53 crc kubenswrapper[4838]: I0202 10:59:53.044371 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d8db2f1-c01c-4848-923a-f4fb42f7d2be-catalog-content\") pod \"community-operators-dcrh2\" (UID: \"8d8db2f1-c01c-4848-923a-f4fb42f7d2be\") " pod="openshift-marketplace/community-operators-dcrh2" Feb 02 10:59:53 crc kubenswrapper[4838]: I0202 10:59:53.044433 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75fhw\" (UniqueName: \"kubernetes.io/projected/8d8db2f1-c01c-4848-923a-f4fb42f7d2be-kube-api-access-75fhw\") pod \"community-operators-dcrh2\" (UID: \"8d8db2f1-c01c-4848-923a-f4fb42f7d2be\") " pod="openshift-marketplace/community-operators-dcrh2" Feb 02 10:59:53 crc kubenswrapper[4838]: I0202 10:59:53.044478 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d8db2f1-c01c-4848-923a-f4fb42f7d2be-utilities\") pod \"community-operators-dcrh2\" (UID: \"8d8db2f1-c01c-4848-923a-f4fb42f7d2be\") " pod="openshift-marketplace/community-operators-dcrh2" Feb 02 10:59:53 crc kubenswrapper[4838]: I0202 10:59:53.044872 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d8db2f1-c01c-4848-923a-f4fb42f7d2be-catalog-content\") pod \"community-operators-dcrh2\" (UID: \"8d8db2f1-c01c-4848-923a-f4fb42f7d2be\") " pod="openshift-marketplace/community-operators-dcrh2" Feb 02 10:59:53 crc kubenswrapper[4838]: I0202 10:59:53.044904 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d8db2f1-c01c-4848-923a-f4fb42f7d2be-utilities\") pod \"community-operators-dcrh2\" (UID: 
\"8d8db2f1-c01c-4848-923a-f4fb42f7d2be\") " pod="openshift-marketplace/community-operators-dcrh2" Feb 02 10:59:53 crc kubenswrapper[4838]: I0202 10:59:53.091029 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75fhw\" (UniqueName: \"kubernetes.io/projected/8d8db2f1-c01c-4848-923a-f4fb42f7d2be-kube-api-access-75fhw\") pod \"community-operators-dcrh2\" (UID: \"8d8db2f1-c01c-4848-923a-f4fb42f7d2be\") " pod="openshift-marketplace/community-operators-dcrh2" Feb 02 10:59:53 crc kubenswrapper[4838]: I0202 10:59:53.139492 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dcrh2" Feb 02 10:59:53 crc kubenswrapper[4838]: I0202 10:59:53.566511 4838 generic.go:334] "Generic (PLEG): container finished" podID="71ec42fd-382d-43bd-9353-24e15ac2e795" containerID="a3506cfd0d878b3c952bc1fcb6763b970cf9b768fd91589d60966f631a786cd7" exitCode=0 Feb 02 10:59:53 crc kubenswrapper[4838]: I0202 10:59:53.566980 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-62q4t" event={"ID":"71ec42fd-382d-43bd-9353-24e15ac2e795","Type":"ContainerDied","Data":"a3506cfd0d878b3c952bc1fcb6763b970cf9b768fd91589d60966f631a786cd7"} Feb 02 10:59:53 crc kubenswrapper[4838]: I0202 10:59:53.568257 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dcrh2"] Feb 02 10:59:53 crc kubenswrapper[4838]: I0202 10:59:53.570579 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tdj7s" event={"ID":"6da618e6-95eb-478d-a290-fb44dfef06f7","Type":"ContainerStarted","Data":"adddae18fd9cc3bce455ea0a57184c1d19f7ce6bef332b806d3892b491a1989d"} Feb 02 10:59:53 crc kubenswrapper[4838]: W0202 10:59:53.571900 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8d8db2f1_c01c_4848_923a_f4fb42f7d2be.slice/crio-3d1506a766ae394fc2881dffe847719a0e5e0cbe00d7f68c0397d6bca0044b8c WatchSource:0}: Error finding container 3d1506a766ae394fc2881dffe847719a0e5e0cbe00d7f68c0397d6bca0044b8c: Status 404 returned error can't find the container with id 3d1506a766ae394fc2881dffe847719a0e5e0cbe00d7f68c0397d6bca0044b8c Feb 02 10:59:54 crc kubenswrapper[4838]: I0202 10:59:54.207834 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2f7cb"] Feb 02 10:59:54 crc kubenswrapper[4838]: I0202 10:59:54.209075 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2f7cb" Feb 02 10:59:54 crc kubenswrapper[4838]: I0202 10:59:54.211846 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Feb 02 10:59:54 crc kubenswrapper[4838]: I0202 10:59:54.236718 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2f7cb"] Feb 02 10:59:54 crc kubenswrapper[4838]: I0202 10:59:54.362563 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5cef155-b12d-4e9b-81b7-9a224b8fe5b3-utilities\") pod \"redhat-marketplace-2f7cb\" (UID: \"d5cef155-b12d-4e9b-81b7-9a224b8fe5b3\") " pod="openshift-marketplace/redhat-marketplace-2f7cb" Feb 02 10:59:54 crc kubenswrapper[4838]: I0202 10:59:54.362705 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bpg4\" (UniqueName: \"kubernetes.io/projected/d5cef155-b12d-4e9b-81b7-9a224b8fe5b3-kube-api-access-9bpg4\") pod \"redhat-marketplace-2f7cb\" (UID: \"d5cef155-b12d-4e9b-81b7-9a224b8fe5b3\") " pod="openshift-marketplace/redhat-marketplace-2f7cb" Feb 02 10:59:54 crc kubenswrapper[4838]: I0202 10:59:54.362768 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5cef155-b12d-4e9b-81b7-9a224b8fe5b3-catalog-content\") pod \"redhat-marketplace-2f7cb\" (UID: \"d5cef155-b12d-4e9b-81b7-9a224b8fe5b3\") " pod="openshift-marketplace/redhat-marketplace-2f7cb" Feb 02 10:59:54 crc kubenswrapper[4838]: I0202 10:59:54.463733 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5cef155-b12d-4e9b-81b7-9a224b8fe5b3-catalog-content\") pod \"redhat-marketplace-2f7cb\" (UID: \"d5cef155-b12d-4e9b-81b7-9a224b8fe5b3\") " pod="openshift-marketplace/redhat-marketplace-2f7cb" Feb 02 10:59:54 crc kubenswrapper[4838]: I0202 10:59:54.463835 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5cef155-b12d-4e9b-81b7-9a224b8fe5b3-utilities\") pod \"redhat-marketplace-2f7cb\" (UID: \"d5cef155-b12d-4e9b-81b7-9a224b8fe5b3\") " pod="openshift-marketplace/redhat-marketplace-2f7cb" Feb 02 10:59:54 crc kubenswrapper[4838]: I0202 10:59:54.463877 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bpg4\" (UniqueName: \"kubernetes.io/projected/d5cef155-b12d-4e9b-81b7-9a224b8fe5b3-kube-api-access-9bpg4\") pod \"redhat-marketplace-2f7cb\" (UID: \"d5cef155-b12d-4e9b-81b7-9a224b8fe5b3\") " pod="openshift-marketplace/redhat-marketplace-2f7cb" Feb 02 10:59:54 crc kubenswrapper[4838]: I0202 10:59:54.464431 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5cef155-b12d-4e9b-81b7-9a224b8fe5b3-catalog-content\") pod \"redhat-marketplace-2f7cb\" (UID: \"d5cef155-b12d-4e9b-81b7-9a224b8fe5b3\") " pod="openshift-marketplace/redhat-marketplace-2f7cb" Feb 02 10:59:54 crc kubenswrapper[4838]: I0202 10:59:54.464453 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5cef155-b12d-4e9b-81b7-9a224b8fe5b3-utilities\") pod \"redhat-marketplace-2f7cb\" (UID: 
\"d5cef155-b12d-4e9b-81b7-9a224b8fe5b3\") " pod="openshift-marketplace/redhat-marketplace-2f7cb" Feb 02 10:59:54 crc kubenswrapper[4838]: I0202 10:59:54.487551 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bpg4\" (UniqueName: \"kubernetes.io/projected/d5cef155-b12d-4e9b-81b7-9a224b8fe5b3-kube-api-access-9bpg4\") pod \"redhat-marketplace-2f7cb\" (UID: \"d5cef155-b12d-4e9b-81b7-9a224b8fe5b3\") " pod="openshift-marketplace/redhat-marketplace-2f7cb" Feb 02 10:59:54 crc kubenswrapper[4838]: I0202 10:59:54.530649 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2f7cb" Feb 02 10:59:54 crc kubenswrapper[4838]: I0202 10:59:54.578479 4838 generic.go:334] "Generic (PLEG): container finished" podID="6da618e6-95eb-478d-a290-fb44dfef06f7" containerID="adddae18fd9cc3bce455ea0a57184c1d19f7ce6bef332b806d3892b491a1989d" exitCode=0 Feb 02 10:59:54 crc kubenswrapper[4838]: I0202 10:59:54.578553 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tdj7s" event={"ID":"6da618e6-95eb-478d-a290-fb44dfef06f7","Type":"ContainerDied","Data":"adddae18fd9cc3bce455ea0a57184c1d19f7ce6bef332b806d3892b491a1989d"} Feb 02 10:59:54 crc kubenswrapper[4838]: I0202 10:59:54.581179 4838 generic.go:334] "Generic (PLEG): container finished" podID="8d8db2f1-c01c-4848-923a-f4fb42f7d2be" containerID="ea1bcb852b3431e948bab1a1369fb7175b0c100ea64af8116550fc6262a0b25a" exitCode=0 Feb 02 10:59:54 crc kubenswrapper[4838]: I0202 10:59:54.581242 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dcrh2" event={"ID":"8d8db2f1-c01c-4848-923a-f4fb42f7d2be","Type":"ContainerDied","Data":"ea1bcb852b3431e948bab1a1369fb7175b0c100ea64af8116550fc6262a0b25a"} Feb 02 10:59:54 crc kubenswrapper[4838]: I0202 10:59:54.581272 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dcrh2" event={"ID":"8d8db2f1-c01c-4848-923a-f4fb42f7d2be","Type":"ContainerStarted","Data":"3d1506a766ae394fc2881dffe847719a0e5e0cbe00d7f68c0397d6bca0044b8c"} Feb 02 10:59:54 crc kubenswrapper[4838]: I0202 10:59:54.924789 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2f7cb"] Feb 02 10:59:55 crc kubenswrapper[4838]: I0202 10:59:55.587656 4838 generic.go:334] "Generic (PLEG): container finished" podID="d5cef155-b12d-4e9b-81b7-9a224b8fe5b3" containerID="1f5a0afbdf93bf36266a092cacffc7710dea9088485eb180623276a0daf2ffe1" exitCode=0 Feb 02 10:59:55 crc kubenswrapper[4838]: I0202 10:59:55.587929 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2f7cb" event={"ID":"d5cef155-b12d-4e9b-81b7-9a224b8fe5b3","Type":"ContainerDied","Data":"1f5a0afbdf93bf36266a092cacffc7710dea9088485eb180623276a0daf2ffe1"} Feb 02 10:59:55 crc kubenswrapper[4838]: I0202 10:59:55.588161 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2f7cb" event={"ID":"d5cef155-b12d-4e9b-81b7-9a224b8fe5b3","Type":"ContainerStarted","Data":"e5d11ab6288f5ef50583b1012f389cc21fdea233a7e58d92622468bd7bc73107"} Feb 02 10:59:55 crc kubenswrapper[4838]: I0202 10:59:55.594800 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-62q4t" 
event={"ID":"71ec42fd-382d-43bd-9353-24e15ac2e795","Type":"ContainerStarted","Data":"c1e98e5d859710147c683ffb1f3164679d28e6f4b453fac3757d17892a285fc2"} Feb 02 10:59:56 crc kubenswrapper[4838]: I0202 10:59:56.607544 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tdj7s" event={"ID":"6da618e6-95eb-478d-a290-fb44dfef06f7","Type":"ContainerStarted","Data":"9e47a271d19b4135edf9c983cb105ea9b4f22b97a88905b3c553fbc22f6744ad"} Feb 02 10:59:56 crc kubenswrapper[4838]: I0202 10:59:56.609218 4838 generic.go:334] "Generic (PLEG): container finished" podID="71ec42fd-382d-43bd-9353-24e15ac2e795" containerID="c1e98e5d859710147c683ffb1f3164679d28e6f4b453fac3757d17892a285fc2" exitCode=0 Feb 02 10:59:56 crc kubenswrapper[4838]: I0202 10:59:56.609302 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-62q4t" event={"ID":"71ec42fd-382d-43bd-9353-24e15ac2e795","Type":"ContainerDied","Data":"c1e98e5d859710147c683ffb1f3164679d28e6f4b453fac3757d17892a285fc2"} Feb 02 10:59:56 crc kubenswrapper[4838]: I0202 10:59:56.612442 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dcrh2" event={"ID":"8d8db2f1-c01c-4848-923a-f4fb42f7d2be","Type":"ContainerStarted","Data":"a0a9e90b123db84fbc12cbb44ef51734a753506c2867ac99dfc93648a38e61a1"} Feb 02 10:59:56 crc kubenswrapper[4838]: I0202 10:59:56.626881 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tdj7s" podStartSLOduration=3.821944203 podStartE2EDuration="6.626864389s" podCreationTimestamp="2026-02-02 10:59:50 +0000 UTC" firstStartedPulling="2026-02-02 10:59:52.560769109 +0000 UTC m=+386.897870147" lastFinishedPulling="2026-02-02 10:59:55.365689305 +0000 UTC m=+389.702790333" observedRunningTime="2026-02-02 10:59:56.622813631 +0000 UTC m=+390.959914679" watchObservedRunningTime="2026-02-02 10:59:56.626864389 +0000 UTC m=+390.963965427" Feb 02 10:59:57 crc kubenswrapper[4838]: I0202 10:59:57.620809 4838 generic.go:334] "Generic (PLEG): container finished" podID="8d8db2f1-c01c-4848-923a-f4fb42f7d2be" containerID="a0a9e90b123db84fbc12cbb44ef51734a753506c2867ac99dfc93648a38e61a1" exitCode=0 Feb 02 10:59:57 crc kubenswrapper[4838]: I0202 10:59:57.620953 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dcrh2" event={"ID":"8d8db2f1-c01c-4848-923a-f4fb42f7d2be","Type":"ContainerDied","Data":"a0a9e90b123db84fbc12cbb44ef51734a753506c2867ac99dfc93648a38e61a1"} Feb 02 10:59:58 crc kubenswrapper[4838]: I0202 10:59:58.628383 4838 generic.go:334] "Generic (PLEG): container finished" podID="d5cef155-b12d-4e9b-81b7-9a224b8fe5b3" containerID="e6396b05fb7e7b39b9386876b6831af3a8018cfc335ebfd6304b078dc312946a" exitCode=0 Feb 02 10:59:58 crc kubenswrapper[4838]: I0202 10:59:58.628663 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2f7cb" event={"ID":"d5cef155-b12d-4e9b-81b7-9a224b8fe5b3","Type":"ContainerDied","Data":"e6396b05fb7e7b39b9386876b6831af3a8018cfc335ebfd6304b078dc312946a"} Feb 02 11:00:00 crc kubenswrapper[4838]: I0202 11:00:00.173650 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500500-msftz"] Feb 02 11:00:00 crc kubenswrapper[4838]: I0202 11:00:00.175013 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500500-msftz" Feb 02 11:00:00 crc kubenswrapper[4838]: I0202 11:00:00.177218 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 02 11:00:00 crc kubenswrapper[4838]: I0202 11:00:00.182968 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 02 11:00:00 crc kubenswrapper[4838]: I0202 11:00:00.187215 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500500-msftz"] Feb 02 11:00:00 crc kubenswrapper[4838]: I0202 11:00:00.357774 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/05d7eca8-60b7-46dc-bdf0-510d63e525e6-config-volume\") pod \"collect-profiles-29500500-msftz\" (UID: \"05d7eca8-60b7-46dc-bdf0-510d63e525e6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500500-msftz" Feb 02 11:00:00 crc kubenswrapper[4838]: I0202 11:00:00.358015 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/05d7eca8-60b7-46dc-bdf0-510d63e525e6-secret-volume\") pod \"collect-profiles-29500500-msftz\" (UID: \"05d7eca8-60b7-46dc-bdf0-510d63e525e6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500500-msftz" Feb 02 11:00:00 crc kubenswrapper[4838]: I0202 11:00:00.358160 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fkxg\" (UniqueName: \"kubernetes.io/projected/05d7eca8-60b7-46dc-bdf0-510d63e525e6-kube-api-access-5fkxg\") pod \"collect-profiles-29500500-msftz\" (UID: \"05d7eca8-60b7-46dc-bdf0-510d63e525e6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500500-msftz" Feb 02 11:00:00 crc kubenswrapper[4838]: I0202 11:00:00.459146 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/05d7eca8-60b7-46dc-bdf0-510d63e525e6-secret-volume\") pod \"collect-profiles-29500500-msftz\" (UID: \"05d7eca8-60b7-46dc-bdf0-510d63e525e6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500500-msftz" Feb 02 11:00:00 crc kubenswrapper[4838]: I0202 11:00:00.459196 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fkxg\" (UniqueName: \"kubernetes.io/projected/05d7eca8-60b7-46dc-bdf0-510d63e525e6-kube-api-access-5fkxg\") pod \"collect-profiles-29500500-msftz\" (UID: \"05d7eca8-60b7-46dc-bdf0-510d63e525e6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500500-msftz" Feb 02 11:00:00 crc kubenswrapper[4838]: I0202 11:00:00.459218 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/05d7eca8-60b7-46dc-bdf0-510d63e525e6-config-volume\") pod \"collect-profiles-29500500-msftz\" (UID: \"05d7eca8-60b7-46dc-bdf0-510d63e525e6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500500-msftz" Feb 02 11:00:00 crc kubenswrapper[4838]: I0202 11:00:00.460043 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/05d7eca8-60b7-46dc-bdf0-510d63e525e6-config-volume\") pod 
\"collect-profiles-29500500-msftz\" (UID: \"05d7eca8-60b7-46dc-bdf0-510d63e525e6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500500-msftz" Feb 02 11:00:00 crc kubenswrapper[4838]: I0202 11:00:00.465152 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/05d7eca8-60b7-46dc-bdf0-510d63e525e6-secret-volume\") pod \"collect-profiles-29500500-msftz\" (UID: \"05d7eca8-60b7-46dc-bdf0-510d63e525e6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500500-msftz" Feb 02 11:00:00 crc kubenswrapper[4838]: I0202 11:00:00.477970 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fkxg\" (UniqueName: \"kubernetes.io/projected/05d7eca8-60b7-46dc-bdf0-510d63e525e6-kube-api-access-5fkxg\") pod \"collect-profiles-29500500-msftz\" (UID: \"05d7eca8-60b7-46dc-bdf0-510d63e525e6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500500-msftz" Feb 02 11:00:00 crc kubenswrapper[4838]: I0202 11:00:00.504721 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500500-msftz" Feb 02 11:00:00 crc kubenswrapper[4838]: I0202 11:00:00.653430 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dcrh2" event={"ID":"8d8db2f1-c01c-4848-923a-f4fb42f7d2be","Type":"ContainerStarted","Data":"ea51924d28912ce95dae53ec95e70ed5bfd9869a7a22fea940ddded67b4dfb5e"} Feb 02 11:00:00 crc kubenswrapper[4838]: I0202 11:00:00.657928 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-62q4t" event={"ID":"71ec42fd-382d-43bd-9353-24e15ac2e795","Type":"ContainerStarted","Data":"07c84ffc029f188637aeda3bd7d72b8a9666b2d449d0beea40d7f79fb64b4f18"} Feb 02 11:00:00 crc kubenswrapper[4838]: I0202 11:00:00.675219 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dcrh2" podStartSLOduration=3.410076616 podStartE2EDuration="8.675201905s" podCreationTimestamp="2026-02-02 10:59:52 +0000 UTC" firstStartedPulling="2026-02-02 10:59:54.582564739 +0000 UTC m=+388.919665767" lastFinishedPulling="2026-02-02 10:59:59.847690028 +0000 UTC m=+394.184791056" observedRunningTime="2026-02-02 11:00:00.67239633 +0000 UTC m=+395.009497358" watchObservedRunningTime="2026-02-02 11:00:00.675201905 +0000 UTC m=+395.012302933" Feb 02 11:00:00 crc kubenswrapper[4838]: I0202 11:00:00.693563 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-62q4t" podStartSLOduration=3.593455129 podStartE2EDuration="9.693543734s" podCreationTimestamp="2026-02-02 10:59:51 +0000 UTC" firstStartedPulling="2026-02-02 10:59:53.570078161 +0000 UTC m=+387.907179199" lastFinishedPulling="2026-02-02 10:59:59.670166766 +0000 UTC m=+394.007267804" observedRunningTime="2026-02-02 11:00:00.692820454 +0000 UTC m=+395.029921482" watchObservedRunningTime="2026-02-02 11:00:00.693543734 +0000 UTC m=+395.030644772" Feb 02 11:00:00 crc kubenswrapper[4838]: I0202 11:00:00.737573 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tdj7s" Feb 02 11:00:00 crc kubenswrapper[4838]: I0202 11:00:00.738603 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tdj7s" Feb 02 11:00:00 crc kubenswrapper[4838]: I0202 
11:00:00.743145 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500500-msftz"] Feb 02 11:00:00 crc kubenswrapper[4838]: I0202 11:00:00.793859 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tdj7s" Feb 02 11:00:01 crc kubenswrapper[4838]: I0202 11:00:01.665099 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500500-msftz" event={"ID":"05d7eca8-60b7-46dc-bdf0-510d63e525e6","Type":"ContainerStarted","Data":"321d34dac8b8e4a0bbf874a99a057dd2186b893be617243b10f78e1e92bd3b98"} Feb 02 11:00:01 crc kubenswrapper[4838]: I0202 11:00:01.665390 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500500-msftz" event={"ID":"05d7eca8-60b7-46dc-bdf0-510d63e525e6","Type":"ContainerStarted","Data":"eb85a4722f571ccc61d99990ff5f8fe34ecfe6b1c39f6a479eff60aa8fc3162e"} Feb 02 11:00:01 crc kubenswrapper[4838]: I0202 11:00:01.667122 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2f7cb" event={"ID":"d5cef155-b12d-4e9b-81b7-9a224b8fe5b3","Type":"ContainerStarted","Data":"e4232720588d0e4c4c1df0285cd822b7d07d76ffd807ac29e9577a7ba7774eb1"} Feb 02 11:00:01 crc kubenswrapper[4838]: I0202 11:00:01.686803 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29500500-msftz" podStartSLOduration=1.686782767 podStartE2EDuration="1.686782767s" podCreationTimestamp="2026-02-02 11:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:00:01.685036111 +0000 UTC m=+396.022137139" watchObservedRunningTime="2026-02-02 11:00:01.686782767 +0000 UTC m=+396.023883805" Feb 02 11:00:01 crc kubenswrapper[4838]: I0202 11:00:01.706848 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2f7cb" podStartSLOduration=2.241074057 podStartE2EDuration="7.706827392s" podCreationTimestamp="2026-02-02 10:59:54 +0000 UTC" firstStartedPulling="2026-02-02 10:59:55.714991655 +0000 UTC m=+390.052092683" lastFinishedPulling="2026-02-02 11:00:01.18074499 +0000 UTC m=+395.517846018" observedRunningTime="2026-02-02 11:00:01.702422424 +0000 UTC m=+396.039523472" watchObservedRunningTime="2026-02-02 11:00:01.706827392 +0000 UTC m=+396.043928450" Feb 02 11:00:01 crc kubenswrapper[4838]: I0202 11:00:01.717951 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tdj7s" Feb 02 11:00:02 crc kubenswrapper[4838]: I0202 11:00:02.128367 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-62q4t" Feb 02 11:00:02 crc kubenswrapper[4838]: I0202 11:00:02.128634 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-62q4t" Feb 02 11:00:02 crc kubenswrapper[4838]: I0202 11:00:02.672949 4838 generic.go:334] "Generic (PLEG): container finished" podID="05d7eca8-60b7-46dc-bdf0-510d63e525e6" containerID="321d34dac8b8e4a0bbf874a99a057dd2186b893be617243b10f78e1e92bd3b98" exitCode=0 Feb 02 11:00:02 crc kubenswrapper[4838]: I0202 11:00:02.673086 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operator-lifecycle-manager/collect-profiles-29500500-msftz" event={"ID":"05d7eca8-60b7-46dc-bdf0-510d63e525e6","Type":"ContainerDied","Data":"321d34dac8b8e4a0bbf874a99a057dd2186b893be617243b10f78e1e92bd3b98"} Feb 02 11:00:03 crc kubenswrapper[4838]: I0202 11:00:03.139829 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dcrh2" Feb 02 11:00:03 crc kubenswrapper[4838]: I0202 11:00:03.139877 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dcrh2" Feb 02 11:00:03 crc kubenswrapper[4838]: I0202 11:00:03.168774 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-62q4t" podUID="71ec42fd-382d-43bd-9353-24e15ac2e795" containerName="registry-server" probeResult="failure" output=< Feb 02 11:00:03 crc kubenswrapper[4838]: timeout: failed to connect service ":50051" within 1s Feb 02 11:00:03 crc kubenswrapper[4838]: > Feb 02 11:00:03 crc kubenswrapper[4838]: I0202 11:00:03.185700 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dcrh2" Feb 02 11:00:03 crc kubenswrapper[4838]: I0202 11:00:03.982056 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500500-msftz" Feb 02 11:00:04 crc kubenswrapper[4838]: I0202 11:00:04.109301 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/05d7eca8-60b7-46dc-bdf0-510d63e525e6-config-volume\") pod \"05d7eca8-60b7-46dc-bdf0-510d63e525e6\" (UID: \"05d7eca8-60b7-46dc-bdf0-510d63e525e6\") " Feb 02 11:00:04 crc kubenswrapper[4838]: I0202 11:00:04.109399 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/05d7eca8-60b7-46dc-bdf0-510d63e525e6-secret-volume\") pod \"05d7eca8-60b7-46dc-bdf0-510d63e525e6\" (UID: \"05d7eca8-60b7-46dc-bdf0-510d63e525e6\") " Feb 02 11:00:04 crc kubenswrapper[4838]: I0202 11:00:04.109437 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5fkxg\" (UniqueName: \"kubernetes.io/projected/05d7eca8-60b7-46dc-bdf0-510d63e525e6-kube-api-access-5fkxg\") pod \"05d7eca8-60b7-46dc-bdf0-510d63e525e6\" (UID: \"05d7eca8-60b7-46dc-bdf0-510d63e525e6\") " Feb 02 11:00:04 crc kubenswrapper[4838]: I0202 11:00:04.110257 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05d7eca8-60b7-46dc-bdf0-510d63e525e6-config-volume" (OuterVolumeSpecName: "config-volume") pod "05d7eca8-60b7-46dc-bdf0-510d63e525e6" (UID: "05d7eca8-60b7-46dc-bdf0-510d63e525e6"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:00:04 crc kubenswrapper[4838]: I0202 11:00:04.120780 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05d7eca8-60b7-46dc-bdf0-510d63e525e6-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "05d7eca8-60b7-46dc-bdf0-510d63e525e6" (UID: "05d7eca8-60b7-46dc-bdf0-510d63e525e6"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:00:04 crc kubenswrapper[4838]: I0202 11:00:04.120794 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05d7eca8-60b7-46dc-bdf0-510d63e525e6-kube-api-access-5fkxg" (OuterVolumeSpecName: "kube-api-access-5fkxg") pod "05d7eca8-60b7-46dc-bdf0-510d63e525e6" (UID: "05d7eca8-60b7-46dc-bdf0-510d63e525e6"). InnerVolumeSpecName "kube-api-access-5fkxg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:00:04 crc kubenswrapper[4838]: I0202 11:00:04.210970 4838 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/05d7eca8-60b7-46dc-bdf0-510d63e525e6-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 11:00:04 crc kubenswrapper[4838]: I0202 11:00:04.211350 4838 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/05d7eca8-60b7-46dc-bdf0-510d63e525e6-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 02 11:00:04 crc kubenswrapper[4838]: I0202 11:00:04.211506 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5fkxg\" (UniqueName: \"kubernetes.io/projected/05d7eca8-60b7-46dc-bdf0-510d63e525e6-kube-api-access-5fkxg\") on node \"crc\" DevicePath \"\"" Feb 02 11:00:04 crc kubenswrapper[4838]: I0202 11:00:04.531119 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2f7cb" Feb 02 11:00:04 crc kubenswrapper[4838]: I0202 11:00:04.531537 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2f7cb" Feb 02 11:00:04 crc kubenswrapper[4838]: I0202 11:00:04.585192 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2f7cb" Feb 02 11:00:04 crc kubenswrapper[4838]: I0202 11:00:04.690502 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500500-msftz" event={"ID":"05d7eca8-60b7-46dc-bdf0-510d63e525e6","Type":"ContainerDied","Data":"eb85a4722f571ccc61d99990ff5f8fe34ecfe6b1c39f6a479eff60aa8fc3162e"} Feb 02 11:00:04 crc kubenswrapper[4838]: I0202 11:00:04.690565 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500500-msftz" Feb 02 11:00:04 crc kubenswrapper[4838]: I0202 11:00:04.690569 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb85a4722f571ccc61d99990ff5f8fe34ecfe6b1c39f6a479eff60aa8fc3162e" Feb 02 11:00:11 crc kubenswrapper[4838]: I0202 11:00:11.439808 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" podUID="0adb948a-923d-44f9-8cad-f36fe04a90b2" containerName="registry" containerID="cri-o://a65f736fe5dcc893b94052721613ade53a8256e937aa92d8a7633f858161061c" gracePeriod=30 Feb 02 11:00:12 crc kubenswrapper[4838]: I0202 11:00:12.200698 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-62q4t" Feb 02 11:00:12 crc kubenswrapper[4838]: I0202 11:00:12.259968 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-62q4t" Feb 02 11:00:13 crc kubenswrapper[4838]: I0202 11:00:13.199196 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dcrh2" Feb 02 11:00:14 crc kubenswrapper[4838]: I0202 11:00:14.587970 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2f7cb" Feb 02 11:00:15 crc kubenswrapper[4838]: I0202 11:00:15.430195 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 11:00:15 crc kubenswrapper[4838]: I0202 11:00:15.430263 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 11:00:15 crc kubenswrapper[4838]: I0202 11:00:15.430325 4838 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" Feb 02 11:00:15 crc kubenswrapper[4838]: I0202 11:00:15.430953 4838 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"788680db6347a60c2bbc6d7b7baac3c8eb4876b9f269eb890bcb4237052aee16"} pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 11:00:15 crc kubenswrapper[4838]: I0202 11:00:15.431027 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" containerID="cri-o://788680db6347a60c2bbc6d7b7baac3c8eb4876b9f269eb890bcb4237052aee16" gracePeriod=600 Feb 02 11:00:16 crc kubenswrapper[4838]: I0202 11:00:16.267673 4838 generic.go:334] "Generic (PLEG): container finished" podID="0adb948a-923d-44f9-8cad-f36fe04a90b2" containerID="a65f736fe5dcc893b94052721613ade53a8256e937aa92d8a7633f858161061c" exitCode=0 Feb 02 11:00:16 crc kubenswrapper[4838]: I0202 11:00:16.267755 4838 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" event={"ID":"0adb948a-923d-44f9-8cad-f36fe04a90b2","Type":"ContainerDied","Data":"a65f736fe5dcc893b94052721613ade53a8256e937aa92d8a7633f858161061c"} Feb 02 11:00:16 crc kubenswrapper[4838]: I0202 11:00:16.279694 4838 patch_prober.go:28] interesting pod/image-registry-697d97f7c8-8b9cc container/registry namespace/openshift-image-registry: Readiness probe status=failure output="Get \"https://10.217.0.19:5000/healthz\": dial tcp 10.217.0.19:5000: connect: connection refused" start-of-body= Feb 02 11:00:16 crc kubenswrapper[4838]: I0202 11:00:16.279764 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" podUID="0adb948a-923d-44f9-8cad-f36fe04a90b2" containerName="registry" probeResult="failure" output="Get \"https://10.217.0.19:5000/healthz\": dial tcp 10.217.0.19:5000: connect: connection refused" Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.180856 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.246513 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0adb948a-923d-44f9-8cad-f36fe04a90b2-trusted-ca\") pod \"0adb948a-923d-44f9-8cad-f36fe04a90b2\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.246595 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hzzk9\" (UniqueName: \"kubernetes.io/projected/0adb948a-923d-44f9-8cad-f36fe04a90b2-kube-api-access-hzzk9\") pod \"0adb948a-923d-44f9-8cad-f36fe04a90b2\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.246646 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0adb948a-923d-44f9-8cad-f36fe04a90b2-registry-tls\") pod \"0adb948a-923d-44f9-8cad-f36fe04a90b2\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.246668 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0adb948a-923d-44f9-8cad-f36fe04a90b2-ca-trust-extracted\") pod \"0adb948a-923d-44f9-8cad-f36fe04a90b2\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.246698 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0adb948a-923d-44f9-8cad-f36fe04a90b2-installation-pull-secrets\") pod \"0adb948a-923d-44f9-8cad-f36fe04a90b2\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.246758 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0adb948a-923d-44f9-8cad-f36fe04a90b2-registry-certificates\") pod \"0adb948a-923d-44f9-8cad-f36fe04a90b2\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.246900 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"0adb948a-923d-44f9-8cad-f36fe04a90b2\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.246925 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0adb948a-923d-44f9-8cad-f36fe04a90b2-bound-sa-token\") pod \"0adb948a-923d-44f9-8cad-f36fe04a90b2\" (UID: \"0adb948a-923d-44f9-8cad-f36fe04a90b2\") " Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.252689 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0adb948a-923d-44f9-8cad-f36fe04a90b2-kube-api-access-hzzk9" (OuterVolumeSpecName: "kube-api-access-hzzk9") pod "0adb948a-923d-44f9-8cad-f36fe04a90b2" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2"). InnerVolumeSpecName "kube-api-access-hzzk9". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.277285 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" event={"ID":"0adb948a-923d-44f9-8cad-f36fe04a90b2","Type":"ContainerDied","Data":"04e4d9070542c16aa0e16a60e968e3aa47677349f41f688627c5e3b381b93322"} Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.277335 4838 scope.go:117] "RemoveContainer" containerID="a65f736fe5dcc893b94052721613ade53a8256e937aa92d8a7633f858161061c" Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.277387 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-8b9cc" Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.348148 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hzzk9\" (UniqueName: \"kubernetes.io/projected/0adb948a-923d-44f9-8cad-f36fe04a90b2-kube-api-access-hzzk9\") on node \"crc\" DevicePath \"\"" Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.434130 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0adb948a-923d-44f9-8cad-f36fe04a90b2-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "0adb948a-923d-44f9-8cad-f36fe04a90b2" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.434153 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0adb948a-923d-44f9-8cad-f36fe04a90b2-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "0adb948a-923d-44f9-8cad-f36fe04a90b2" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.434904 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "0adb948a-923d-44f9-8cad-f36fe04a90b2" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.442159 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0adb948a-923d-44f9-8cad-f36fe04a90b2-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "0adb948a-923d-44f9-8cad-f36fe04a90b2" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.442537 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0adb948a-923d-44f9-8cad-f36fe04a90b2-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "0adb948a-923d-44f9-8cad-f36fe04a90b2" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.442865 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0adb948a-923d-44f9-8cad-f36fe04a90b2-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "0adb948a-923d-44f9-8cad-f36fe04a90b2" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.444984 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0adb948a-923d-44f9-8cad-f36fe04a90b2-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "0adb948a-923d-44f9-8cad-f36fe04a90b2" (UID: "0adb948a-923d-44f9-8cad-f36fe04a90b2"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.449753 4838 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0adb948a-923d-44f9-8cad-f36fe04a90b2-registry-certificates\") on node \"crc\" DevicePath \"\"" Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.449853 4838 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0adb948a-923d-44f9-8cad-f36fe04a90b2-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.449924 4838 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0adb948a-923d-44f9-8cad-f36fe04a90b2-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.449992 4838 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0adb948a-923d-44f9-8cad-f36fe04a90b2-registry-tls\") on node \"crc\" DevicePath \"\"" Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.450053 4838 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0adb948a-923d-44f9-8cad-f36fe04a90b2-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.450116 4838 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0adb948a-923d-44f9-8cad-f36fe04a90b2-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.607601 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8b9cc"] Feb 02 11:00:18 crc kubenswrapper[4838]: I0202 11:00:18.618757 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8b9cc"] Feb 02 11:00:19 crc kubenswrapper[4838]: I0202 11:00:19.287224 4838 generic.go:334] "Generic (PLEG): container finished" podID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerID="788680db6347a60c2bbc6d7b7baac3c8eb4876b9f269eb890bcb4237052aee16" exitCode=0 Feb 02 11:00:19 crc kubenswrapper[4838]: I0202 11:00:19.287297 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" event={"ID":"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce","Type":"ContainerDied","Data":"788680db6347a60c2bbc6d7b7baac3c8eb4876b9f269eb890bcb4237052aee16"} Feb 02 11:00:19 crc kubenswrapper[4838]: I0202 11:00:19.287338 4838 scope.go:117] "RemoveContainer" containerID="f5e1c58e9f021684d7724a81b0433857d6dfdcf3bcae7af9610645e44e38d8ef" Feb 02 11:00:20 crc kubenswrapper[4838]: I0202 11:00:20.300299 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" event={"ID":"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce","Type":"ContainerStarted","Data":"9ae800805fcd8b11ea61c3e69aa000f94373b0daa53dc9b9faa0877f99ba8a3a"} Feb 02 11:00:20 crc kubenswrapper[4838]: I0202 11:00:20.519066 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0adb948a-923d-44f9-8cad-f36fe04a90b2" path="/var/lib/kubelet/pods/0adb948a-923d-44f9-8cad-f36fe04a90b2/volumes" Feb 02 11:02:45 crc kubenswrapper[4838]: I0202 11:02:45.429749 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 11:02:45 crc kubenswrapper[4838]: I0202 11:02:45.430368 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 11:03:15 crc kubenswrapper[4838]: I0202 11:03:15.429815 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 11:03:15 crc kubenswrapper[4838]: I0202 11:03:15.430445 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 11:03:45 crc kubenswrapper[4838]: I0202 11:03:45.429944 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 11:03:45 crc kubenswrapper[4838]: I0202 11:03:45.430602 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 11:03:45 crc kubenswrapper[4838]: I0202 11:03:45.430726 4838 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" Feb 02 11:03:45 crc kubenswrapper[4838]: I0202 11:03:45.431700 4838 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9ae800805fcd8b11ea61c3e69aa000f94373b0daa53dc9b9faa0877f99ba8a3a"} pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 11:03:45 crc kubenswrapper[4838]: I0202 11:03:45.431813 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" containerID="cri-o://9ae800805fcd8b11ea61c3e69aa000f94373b0daa53dc9b9faa0877f99ba8a3a" gracePeriod=600 Feb 02 11:03:45 crc kubenswrapper[4838]: I0202 11:03:45.654156 4838 generic.go:334] "Generic (PLEG): container finished" podID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerID="9ae800805fcd8b11ea61c3e69aa000f94373b0daa53dc9b9faa0877f99ba8a3a" exitCode=0 Feb 02 11:03:45 crc kubenswrapper[4838]: I0202 11:03:45.654235 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" event={"ID":"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce","Type":"ContainerDied","Data":"9ae800805fcd8b11ea61c3e69aa000f94373b0daa53dc9b9faa0877f99ba8a3a"} Feb 02 11:03:45 crc kubenswrapper[4838]: I0202 11:03:45.654285 4838 scope.go:117] "RemoveContainer" containerID="788680db6347a60c2bbc6d7b7baac3c8eb4876b9f269eb890bcb4237052aee16" Feb 02 11:03:46 crc kubenswrapper[4838]: I0202 11:03:46.667137 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" event={"ID":"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce","Type":"ContainerStarted","Data":"47f84caf6e841371c2d1b572818b4b359f4d7377669649aa2f737bf7eb7b98db"} Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.708968 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-t4jjz"] Feb 02 11:04:20 crc kubenswrapper[4838]: E0202 11:04:20.709732 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0adb948a-923d-44f9-8cad-f36fe04a90b2" containerName="registry" Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.709748 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="0adb948a-923d-44f9-8cad-f36fe04a90b2" containerName="registry" Feb 02 11:04:20 crc kubenswrapper[4838]: E0202 11:04:20.709764 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05d7eca8-60b7-46dc-bdf0-510d63e525e6" containerName="collect-profiles" Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.709773 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="05d7eca8-60b7-46dc-bdf0-510d63e525e6" containerName="collect-profiles" Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.709890 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="05d7eca8-60b7-46dc-bdf0-510d63e525e6" containerName="collect-profiles" Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.709912 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="0adb948a-923d-44f9-8cad-f36fe04a90b2" containerName="registry" Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.710569 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-t4jjz" Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.714156 4838 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-tbcms" Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.715136 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.715398 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.723587 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-858654f9db-cl9xl"] Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.724614 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-858654f9db-cl9xl" Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.727011 4838 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-x5w4q" Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.754764 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-t4jjz"] Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.758284 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-858654f9db-cl9xl"] Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.763725 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-tvmg6"] Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.764488 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-tvmg6" Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.766727 4838 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-x9752" Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.782345 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdzzh\" (UniqueName: \"kubernetes.io/projected/b7b5f720-0add-47c9-890a-4ca936379c93-kube-api-access-jdzzh\") pod \"cert-manager-858654f9db-cl9xl\" (UID: \"b7b5f720-0add-47c9-890a-4ca936379c93\") " pod="cert-manager/cert-manager-858654f9db-cl9xl" Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.782401 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7txsx\" (UniqueName: \"kubernetes.io/projected/59de6fa5-ca64-482d-81af-d1bfd5e7cba4-kube-api-access-7txsx\") pod \"cert-manager-webhook-687f57d79b-tvmg6\" (UID: \"59de6fa5-ca64-482d-81af-d1bfd5e7cba4\") " pod="cert-manager/cert-manager-webhook-687f57d79b-tvmg6" Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.782437 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97cp9\" (UniqueName: \"kubernetes.io/projected/5206c985-0926-498a-b2ae-3be3a5034206-kube-api-access-97cp9\") pod \"cert-manager-cainjector-cf98fcc89-t4jjz\" (UID: \"5206c985-0926-498a-b2ae-3be3a5034206\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-t4jjz" Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.785974 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-tvmg6"] Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.883927 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97cp9\" (UniqueName: \"kubernetes.io/projected/5206c985-0926-498a-b2ae-3be3a5034206-kube-api-access-97cp9\") pod \"cert-manager-cainjector-cf98fcc89-t4jjz\" (UID: \"5206c985-0926-498a-b2ae-3be3a5034206\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-t4jjz" Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.884102 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdzzh\" (UniqueName: \"kubernetes.io/projected/b7b5f720-0add-47c9-890a-4ca936379c93-kube-api-access-jdzzh\") pod \"cert-manager-858654f9db-cl9xl\" (UID: \"b7b5f720-0add-47c9-890a-4ca936379c93\") " pod="cert-manager/cert-manager-858654f9db-cl9xl" Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.884150 4838 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7txsx\" (UniqueName: \"kubernetes.io/projected/59de6fa5-ca64-482d-81af-d1bfd5e7cba4-kube-api-access-7txsx\") pod \"cert-manager-webhook-687f57d79b-tvmg6\" (UID: \"59de6fa5-ca64-482d-81af-d1bfd5e7cba4\") " pod="cert-manager/cert-manager-webhook-687f57d79b-tvmg6" Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.902248 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97cp9\" (UniqueName: \"kubernetes.io/projected/5206c985-0926-498a-b2ae-3be3a5034206-kube-api-access-97cp9\") pod \"cert-manager-cainjector-cf98fcc89-t4jjz\" (UID: \"5206c985-0926-498a-b2ae-3be3a5034206\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-t4jjz" Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.910497 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdzzh\" (UniqueName: \"kubernetes.io/projected/b7b5f720-0add-47c9-890a-4ca936379c93-kube-api-access-jdzzh\") pod \"cert-manager-858654f9db-cl9xl\" (UID: \"b7b5f720-0add-47c9-890a-4ca936379c93\") " pod="cert-manager/cert-manager-858654f9db-cl9xl" Feb 02 11:04:20 crc kubenswrapper[4838]: I0202 11:04:20.911285 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7txsx\" (UniqueName: \"kubernetes.io/projected/59de6fa5-ca64-482d-81af-d1bfd5e7cba4-kube-api-access-7txsx\") pod \"cert-manager-webhook-687f57d79b-tvmg6\" (UID: \"59de6fa5-ca64-482d-81af-d1bfd5e7cba4\") " pod="cert-manager/cert-manager-webhook-687f57d79b-tvmg6" Feb 02 11:04:21 crc kubenswrapper[4838]: I0202 11:04:21.043586 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-t4jjz" Feb 02 11:04:21 crc kubenswrapper[4838]: I0202 11:04:21.064157 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-858654f9db-cl9xl" Feb 02 11:04:21 crc kubenswrapper[4838]: I0202 11:04:21.082954 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-tvmg6" Feb 02 11:04:21 crc kubenswrapper[4838]: I0202 11:04:21.372812 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-tvmg6"] Feb 02 11:04:21 crc kubenswrapper[4838]: I0202 11:04:21.378390 4838 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 11:04:21 crc kubenswrapper[4838]: I0202 11:04:21.527457 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-858654f9db-cl9xl"] Feb 02 11:04:21 crc kubenswrapper[4838]: I0202 11:04:21.536167 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-t4jjz"] Feb 02 11:04:21 crc kubenswrapper[4838]: I0202 11:04:21.902929 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-cf98fcc89-t4jjz" event={"ID":"5206c985-0926-498a-b2ae-3be3a5034206","Type":"ContainerStarted","Data":"064fecf439861cac3f1fbcfe51dcd5efacea20a6cb2eb380183387f8e39d25bb"} Feb 02 11:04:21 crc kubenswrapper[4838]: I0202 11:04:21.904357 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-687f57d79b-tvmg6" event={"ID":"59de6fa5-ca64-482d-81af-d1bfd5e7cba4","Type":"ContainerStarted","Data":"6c718986a15a11b93475598b1eec13360ea4e7fce19303a78114a60e78bde2c3"} Feb 02 11:04:21 crc kubenswrapper[4838]: I0202 11:04:21.905673 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-858654f9db-cl9xl" event={"ID":"b7b5f720-0add-47c9-890a-4ca936379c93","Type":"ContainerStarted","Data":"8adb00e2cdd9174235d4ada79dfa03d2d29180a15d3b6f082f015d74f9c405f3"} Feb 02 11:04:27 crc kubenswrapper[4838]: I0202 11:04:27.944443 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-cf98fcc89-t4jjz" event={"ID":"5206c985-0926-498a-b2ae-3be3a5034206","Type":"ContainerStarted","Data":"d32b3091a92f49766875857100f7704d2a5ab10c4f7359cbf80caf5bb5a790ca"} Feb 02 11:04:27 crc kubenswrapper[4838]: I0202 11:04:27.947344 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-687f57d79b-tvmg6" event={"ID":"59de6fa5-ca64-482d-81af-d1bfd5e7cba4","Type":"ContainerStarted","Data":"4a865e7f27bfccbe44cdc7dd9d58694628ceccda265147c20fde1512e4c9b08b"} Feb 02 11:04:27 crc kubenswrapper[4838]: I0202 11:04:27.947551 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-687f57d79b-tvmg6" Feb 02 11:04:27 crc kubenswrapper[4838]: I0202 11:04:27.949671 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-858654f9db-cl9xl" event={"ID":"b7b5f720-0add-47c9-890a-4ca936379c93","Type":"ContainerStarted","Data":"68c58253a27ccccc5a20845722c16973ae0263a9ad6ad970995111d7e2573457"} Feb 02 11:04:27 crc kubenswrapper[4838]: I0202 11:04:27.966173 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-cf98fcc89-t4jjz" podStartSLOduration=2.289661174 podStartE2EDuration="7.966148352s" podCreationTimestamp="2026-02-02 11:04:20 +0000 UTC" firstStartedPulling="2026-02-02 11:04:21.544882094 +0000 UTC m=+655.881983162" lastFinishedPulling="2026-02-02 11:04:27.221369302 +0000 UTC m=+661.558470340" observedRunningTime="2026-02-02 11:04:27.965433613 +0000 UTC m=+662.302534691" watchObservedRunningTime="2026-02-02 11:04:27.966148352 +0000 UTC m=+662.303249420" Feb 02 
Feb 02 11:04:28 crc kubenswrapper[4838]: I0202 11:04:28.000658 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-687f57d79b-tvmg6" podStartSLOduration=2.156506836 podStartE2EDuration="8.000644653s" podCreationTimestamp="2026-02-02 11:04:20 +0000 UTC" firstStartedPulling="2026-02-02 11:04:21.37817002 +0000 UTC m=+655.715271048" lastFinishedPulling="2026-02-02 11:04:27.222307797 +0000 UTC m=+661.559408865" observedRunningTime="2026-02-02 11:04:27.999153734 +0000 UTC m=+662.336254802" watchObservedRunningTime="2026-02-02 11:04:28.000644653 +0000 UTC m=+662.337745681"
Feb 02 11:04:28 crc kubenswrapper[4838]: I0202 11:04:28.018367 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-858654f9db-cl9xl" podStartSLOduration=2.215525163 podStartE2EDuration="8.018342966s" podCreationTimestamp="2026-02-02 11:04:20 +0000 UTC" firstStartedPulling="2026-02-02 11:04:21.542657545 +0000 UTC m=+655.879758563" lastFinishedPulling="2026-02-02 11:04:27.345475298 +0000 UTC m=+661.682576366" observedRunningTime="2026-02-02 11:04:28.01587846 +0000 UTC m=+662.352979488" watchObservedRunningTime="2026-02-02 11:04:28.018342966 +0000 UTC m=+662.355444024"
Feb 02 11:04:30 crc kubenswrapper[4838]: I0202 11:04:30.563401 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-66l9c"]
Feb 02 11:04:30 crc kubenswrapper[4838]: I0202 11:04:30.564657 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="ovn-controller" containerID="cri-o://047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d" gracePeriod=30
Feb 02 11:04:30 crc kubenswrapper[4838]: I0202 11:04:30.564848 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682" gracePeriod=30
Feb 02 11:04:30 crc kubenswrapper[4838]: I0202 11:04:30.564862 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="northd" containerID="cri-o://c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2" gracePeriod=30
Feb 02 11:04:30 crc kubenswrapper[4838]: I0202 11:04:30.565412 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="sbdb" containerID="cri-o://da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c" gracePeriod=30
Feb 02 11:04:30 crc kubenswrapper[4838]: I0202 11:04:30.565149 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="kube-rbac-proxy-node" containerID="cri-o://6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572" gracePeriod=30
Feb 02 11:04:30 crc kubenswrapper[4838]: I0202 11:04:30.565441 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="nbdb" containerID="cri-o://d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d" gracePeriod=30
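Every container of the deleted ovnkube-node pod is killed with gracePeriod=30, versus the gracePeriod=600 used for the machine-config-daemon restart earlier; the value is taken from each pod's spec. A minimal sketch of where that number lives, using the standard API field (the image reference is hypothetical):

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	// terminationGracePeriodSeconds=30 would produce the
	// "gracePeriod=30" seen in the kill entries above.
	grace := int64(30)
	spec := corev1.PodSpec{
		TerminationGracePeriodSeconds: &grace,
		Containers: []corev1.Container{{
			Name:  "ovn-controller",
			Image: "example.invalid/ovn-kubernetes:tag", // hypothetical image ref
		}},
	}
	fmt.Println("gracePeriod:", *spec.TerminationGracePeriodSeconds)
}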
containerID="cri-o://d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d" gracePeriod=30 Feb 02 11:04:30 crc kubenswrapper[4838]: I0202 11:04:30.565092 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="ovn-acl-logging" containerID="cri-o://188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f" gracePeriod=30 Feb 02 11:04:30 crc kubenswrapper[4838]: I0202 11:04:30.606396 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="ovnkube-controller" containerID="cri-o://56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d" gracePeriod=30 Feb 02 11:04:30 crc kubenswrapper[4838]: I0202 11:04:30.974141 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-66l9c_9bc00b9c-6e31-4f8e-b4ba-44150281ed69/ovnkube-controller/3.log" Feb 02 11:04:30 crc kubenswrapper[4838]: I0202 11:04:30.977778 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-66l9c_9bc00b9c-6e31-4f8e-b4ba-44150281ed69/ovn-acl-logging/0.log" Feb 02 11:04:30 crc kubenswrapper[4838]: I0202 11:04:30.978835 4838 generic.go:334] "Generic (PLEG): container finished" podID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerID="188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f" exitCode=143 Feb 02 11:04:30 crc kubenswrapper[4838]: I0202 11:04:30.979047 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerDied","Data":"188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f"} Feb 02 11:04:31 crc kubenswrapper[4838]: E0202 11:04:31.445894 4838 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c is running failed: container process not found" containerID="da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"] Feb 02 11:04:31 crc kubenswrapper[4838]: E0202 11:04:31.446665 4838 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c is running failed: container process not found" containerID="da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"] Feb 02 11:04:31 crc kubenswrapper[4838]: E0202 11:04:31.446825 4838 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d is running failed: container process not found" containerID="d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d" cmd=["/bin/bash","-c","set -xeo pipefail\n. 
/ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"nb\"\n"] Feb 02 11:04:31 crc kubenswrapper[4838]: E0202 11:04:31.447343 4838 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c is running failed: container process not found" containerID="da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"] Feb 02 11:04:31 crc kubenswrapper[4838]: E0202 11:04:31.447344 4838 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d is running failed: container process not found" containerID="d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"nb\"\n"] Feb 02 11:04:31 crc kubenswrapper[4838]: E0202 11:04:31.447405 4838 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c is running failed: container process not found" probeType="Readiness" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="sbdb" Feb 02 11:04:31 crc kubenswrapper[4838]: E0202 11:04:31.447904 4838 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d is running failed: container process not found" containerID="d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"nb\"\n"] Feb 02 11:04:31 crc kubenswrapper[4838]: E0202 11:04:31.447963 4838 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d is running failed: container process not found" probeType="Readiness" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="nbdb" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.634378 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-66l9c_9bc00b9c-6e31-4f8e-b4ba-44150281ed69/ovnkube-controller/3.log" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.638879 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-66l9c_9bc00b9c-6e31-4f8e-b4ba-44150281ed69/ovn-acl-logging/0.log" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.639602 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-66l9c_9bc00b9c-6e31-4f8e-b4ba-44150281ed69/ovn-controller/0.log" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.640467 4838 util.go:48] "No ready sandbox for pod can be found. 
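The ExecSync NotFound errors are benign here: the readiness probes for nbdb and sbdb fired against containers whose processes had already exited under the deletion in progress, so the runtime reports "container process not found" and the kubelet logs "Probe errored". The probe itself can be reconstructed directly from the cmd array in the log; a sketch against the k8s.io/api types (timing fields omitted, since the log does not show them):

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	// Readiness probe command copied verbatim from the ExecSync entries
	// above (the "sb" variant; the nbdb probe passes "nb" instead).
	probe := &corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			Exec: &corev1.ExecAction{
				Command: []string{
					"/bin/bash", "-c",
					"set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n",
				},
			},
		},
	}
	fmt.Println(probe.ProbeHandler.Exec.Command[2])
}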
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.649232 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7p8h\" (UniqueName: \"kubernetes.io/projected/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-kube-api-access-w7p8h\") pod \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.649393 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-cni-netd\") pod \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.649492 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "9bc00b9c-6e31-4f8e-b4ba-44150281ed69" (UID: "9bc00b9c-6e31-4f8e-b4ba-44150281ed69"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.649578 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-run-systemd\") pod \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.649709 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-kubelet\") pod \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.649743 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-run-ovn\") pod \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.649783 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-ovnkube-config\") pod \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.649808 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "9bc00b9c-6e31-4f8e-b4ba-44150281ed69" (UID: "9bc00b9c-6e31-4f8e-b4ba-44150281ed69"). InnerVolumeSpecName "host-kubelet". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.649847 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-run-openvswitch\") pod \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.649884 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-slash\") pod \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.649919 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-env-overrides\") pod \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.649890 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "9bc00b9c-6e31-4f8e-b4ba-44150281ed69" (UID: "9bc00b9c-6e31-4f8e-b4ba-44150281ed69"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.649952 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-run-netns\") pod \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.649998 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-slash" (OuterVolumeSpecName: "host-slash") pod "9bc00b9c-6e31-4f8e-b4ba-44150281ed69" (UID: "9bc00b9c-6e31-4f8e-b4ba-44150281ed69"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.650075 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "9bc00b9c-6e31-4f8e-b4ba-44150281ed69" (UID: "9bc00b9c-6e31-4f8e-b4ba-44150281ed69"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.650463 4838 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-cni-netd\") on node \"crc\" DevicePath \"\"" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.650512 4838 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.650548 4838 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-kubelet\") on node \"crc\" DevicePath \"\"" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.650570 4838 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-run-openvswitch\") on node \"crc\" DevicePath \"\"" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.650561 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "9bc00b9c-6e31-4f8e-b4ba-44150281ed69" (UID: "9bc00b9c-6e31-4f8e-b4ba-44150281ed69"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.650587 4838 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-slash\") on node \"crc\" DevicePath \"\"" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.650757 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "9bc00b9c-6e31-4f8e-b4ba-44150281ed69" (UID: "9bc00b9c-6e31-4f8e-b4ba-44150281ed69"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.650825 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "9bc00b9c-6e31-4f8e-b4ba-44150281ed69" (UID: "9bc00b9c-6e31-4f8e-b4ba-44150281ed69"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.658476 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-kube-api-access-w7p8h" (OuterVolumeSpecName: "kube-api-access-w7p8h") pod "9bc00b9c-6e31-4f8e-b4ba-44150281ed69" (UID: "9bc00b9c-6e31-4f8e-b4ba-44150281ed69"). InnerVolumeSpecName "kube-api-access-w7p8h". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.674373 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "9bc00b9c-6e31-4f8e-b4ba-44150281ed69" (UID: "9bc00b9c-6e31-4f8e-b4ba-44150281ed69"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.729573 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-gttjk"] Feb 02 11:04:31 crc kubenswrapper[4838]: E0202 11:04:31.729808 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="ovnkube-controller" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.729822 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="ovnkube-controller" Feb 02 11:04:31 crc kubenswrapper[4838]: E0202 11:04:31.729833 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="ovnkube-controller" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.729841 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="ovnkube-controller" Feb 02 11:04:31 crc kubenswrapper[4838]: E0202 11:04:31.729856 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="northd" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.729865 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="northd" Feb 02 11:04:31 crc kubenswrapper[4838]: E0202 11:04:31.729874 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="sbdb" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.729883 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="sbdb" Feb 02 11:04:31 crc kubenswrapper[4838]: E0202 11:04:31.729894 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="ovnkube-controller" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.729902 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="ovnkube-controller" Feb 02 11:04:31 crc kubenswrapper[4838]: E0202 11:04:31.729912 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="ovn-acl-logging" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.729921 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="ovn-acl-logging" Feb 02 11:04:31 crc kubenswrapper[4838]: E0202 11:04:31.729931 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="kube-rbac-proxy-node" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.729939 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="kube-rbac-proxy-node" Feb 02 11:04:31 crc kubenswrapper[4838]: E0202 11:04:31.729953 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="kubecfg-setup" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.729960 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="kubecfg-setup" Feb 02 11:04:31 crc kubenswrapper[4838]: E0202 11:04:31.729971 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" 
containerName="ovn-controller" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.729979 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="ovn-controller" Feb 02 11:04:31 crc kubenswrapper[4838]: E0202 11:04:31.729987 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="nbdb" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.729995 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="nbdb" Feb 02 11:04:31 crc kubenswrapper[4838]: E0202 11:04:31.730004 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="kube-rbac-proxy-ovn-metrics" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.730012 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="kube-rbac-proxy-ovn-metrics" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.730140 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="ovn-acl-logging" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.730155 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="kube-rbac-proxy-ovn-metrics" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.730164 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="ovnkube-controller" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.730173 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="kube-rbac-proxy-node" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.730183 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="ovnkube-controller" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.730194 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="northd" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.730205 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="ovnkube-controller" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.730217 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="ovn-controller" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.730226 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="ovnkube-controller" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.730236 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="sbdb" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.730244 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="nbdb" Feb 02 11:04:31 crc kubenswrapper[4838]: E0202 11:04:31.730344 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="ovnkube-controller" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.730353 4838 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="ovnkube-controller" Feb 02 11:04:31 crc kubenswrapper[4838]: E0202 11:04:31.730361 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="ovnkube-controller" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.730370 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="ovnkube-controller" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.730478 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerName="ovnkube-controller" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.735288 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.750910 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-ovnkube-script-lib\") pod \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.751180 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-cni-bin\") pod \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.751333 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-var-lib-cni-networks-ovn-kubernetes\") pod \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.751800 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-systemd-units\") pod \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.752064 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-log-socket\") pod \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.751820 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "9bc00b9c-6e31-4f8e-b4ba-44150281ed69" (UID: "9bc00b9c-6e31-4f8e-b4ba-44150281ed69"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.752313 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-log-socket" (OuterVolumeSpecName: "log-socket") pod "9bc00b9c-6e31-4f8e-b4ba-44150281ed69" (UID: "9bc00b9c-6e31-4f8e-b4ba-44150281ed69"). InnerVolumeSpecName "log-socket". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.752286 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-etc-openvswitch\") pod \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.751847 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "9bc00b9c-6e31-4f8e-b4ba-44150281ed69" (UID: "9bc00b9c-6e31-4f8e-b4ba-44150281ed69"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.752420 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-var-lib-openvswitch\") pod \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.751866 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "9bc00b9c-6e31-4f8e-b4ba-44150281ed69" (UID: "9bc00b9c-6e31-4f8e-b4ba-44150281ed69"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.752465 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-ovn-node-metrics-cert\") pod \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.752496 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-run-ovn-kubernetes\") pod \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.752520 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-node-log\") pod \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\" (UID: \"9bc00b9c-6e31-4f8e-b4ba-44150281ed69\") " Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.752524 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "9bc00b9c-6e31-4f8e-b4ba-44150281ed69" (UID: "9bc00b9c-6e31-4f8e-b4ba-44150281ed69"). InnerVolumeSpecName "var-lib-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.752740 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-host-cni-bin\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.752773 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-host-cni-netd\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.752766 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "9bc00b9c-6e31-4f8e-b4ba-44150281ed69" (UID: "9bc00b9c-6e31-4f8e-b4ba-44150281ed69"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.752822 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "9bc00b9c-6e31-4f8e-b4ba-44150281ed69" (UID: "9bc00b9c-6e31-4f8e-b4ba-44150281ed69"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.752794 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-node-log\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.752868 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-node-log" (OuterVolumeSpecName: "node-log") pod "9bc00b9c-6e31-4f8e-b4ba-44150281ed69" (UID: "9bc00b9c-6e31-4f8e-b4ba-44150281ed69"). InnerVolumeSpecName "node-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.752881 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b869w\" (UniqueName: \"kubernetes.io/projected/3681cf6d-4236-4114-8d62-c68052318719-kube-api-access-b869w\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.753013 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-host-slash\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.753069 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3681cf6d-4236-4114-8d62-c68052318719-ovnkube-config\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.753124 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-host-run-netns\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.753200 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-log-socket\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.753269 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-host-run-ovn-kubernetes\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.753344 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-host-kubelet\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.753451 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-run-openvswitch\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.753519 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-etc-openvswitch\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.753568 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-systemd-units\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.753671 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-run-systemd\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.753742 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-var-lib-openvswitch\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.753832 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.753888 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3681cf6d-4236-4114-8d62-c68052318719-env-overrides\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.753934 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3681cf6d-4236-4114-8d62-c68052318719-ovn-node-metrics-cert\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.753995 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-run-ovn\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.754045 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3681cf6d-4236-4114-8d62-c68052318719-ovnkube-script-lib\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.754191 
Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.754226 4838 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\""
Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.754253 4838 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-node-log\") on node \"crc\" DevicePath \"\""
Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.754277 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7p8h\" (UniqueName: \"kubernetes.io/projected/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-kube-api-access-w7p8h\") on node \"crc\" DevicePath \"\""
Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.754303 4838 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-run-systemd\") on node \"crc\" DevicePath \"\""
Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.754331 4838 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-ovnkube-config\") on node \"crc\" DevicePath \"\""
Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.754356 4838 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-env-overrides\") on node \"crc\" DevicePath \"\""
Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.754380 4838 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-run-netns\") on node \"crc\" DevicePath \"\""
Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.754405 4838 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-ovnkube-script-lib\") on node \"crc\" DevicePath \"\""
Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.754432 4838 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\""
Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.754458 4838 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-host-cni-bin\") on node \"crc\" DevicePath \"\""
Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.754503 4838 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-systemd-units\") on node \"crc\" DevicePath \"\""
Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.754569 4838 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-log-socket\") on node \"crc\" DevicePath \"\""
Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.755527 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "9bc00b9c-6e31-4f8e-b4ba-44150281ed69" (UID: "9bc00b9c-6e31-4f8e-b4ba-44150281ed69"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.756864 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "9bc00b9c-6e31-4f8e-b4ba-44150281ed69" (UID: "9bc00b9c-6e31-4f8e-b4ba-44150281ed69"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.856243 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-systemd-units\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk"
Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.856304 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-systemd-units\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk"
Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.856354 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-run-systemd\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk"
Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.856307 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-run-systemd\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk"
Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.856402 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-var-lib-openvswitch\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk"
Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.856441 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk"
Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.856464 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3681cf6d-4236-4114-8d62-c68052318719-env-overrides\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk"
Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.856484 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3681cf6d-4236-4114-8d62-c68052318719-ovn-node-metrics-cert\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.856508 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-run-ovn\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.856529 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3681cf6d-4236-4114-8d62-c68052318719-ovnkube-script-lib\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.856543 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.856555 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-host-cni-bin\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.856583 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-node-log\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.856596 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-var-lib-openvswitch\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.856642 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-host-cni-netd\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.856602 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-host-cni-netd\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.856694 4838 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b869w\" (UniqueName: \"kubernetes.io/projected/3681cf6d-4236-4114-8d62-c68052318719-kube-api-access-b869w\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.856766 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-host-slash\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.856815 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-host-run-netns\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.856942 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-run-ovn\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.857235 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-host-slash\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.857281 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-host-cni-bin\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.857309 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-node-log\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.857340 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3681cf6d-4236-4114-8d62-c68052318719-env-overrides\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.857413 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-host-run-netns\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.857592 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/3681cf6d-4236-4114-8d62-c68052318719-ovnkube-config\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.858812 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-log-socket\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.858848 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-host-run-ovn-kubernetes\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.857924 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3681cf6d-4236-4114-8d62-c68052318719-ovnkube-script-lib\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.858850 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3681cf6d-4236-4114-8d62-c68052318719-ovnkube-config\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.858903 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-host-kubelet\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.858870 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-host-kubelet\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.858930 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-log-socket\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.858947 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-run-openvswitch\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.858974 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-etc-openvswitch\") pod \"ovnkube-node-gttjk\" (UID: 
\"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.858940 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-host-run-ovn-kubernetes\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.858995 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-run-openvswitch\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.859033 4838 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.859048 4838 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9bc00b9c-6e31-4f8e-b4ba-44150281ed69-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.859068 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3681cf6d-4236-4114-8d62-c68052318719-etc-openvswitch\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.862873 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3681cf6d-4236-4114-8d62-c68052318719-ovn-node-metrics-cert\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.892041 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b869w\" (UniqueName: \"kubernetes.io/projected/3681cf6d-4236-4114-8d62-c68052318719-kube-api-access-b869w\") pod \"ovnkube-node-gttjk\" (UID: \"3681cf6d-4236-4114-8d62-c68052318719\") " pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.985053 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-66l9c_9bc00b9c-6e31-4f8e-b4ba-44150281ed69/ovnkube-controller/3.log" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.986890 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-66l9c_9bc00b9c-6e31-4f8e-b4ba-44150281ed69/ovn-acl-logging/0.log" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.987692 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-66l9c_9bc00b9c-6e31-4f8e-b4ba-44150281ed69/ovn-controller/0.log" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988012 4838 generic.go:334] "Generic (PLEG): container finished" podID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerID="56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d" exitCode=0 Feb 02 11:04:31 crc 
kubenswrapper[4838]: I0202 11:04:31.988035 4838 generic.go:334] "Generic (PLEG): container finished" podID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerID="da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c" exitCode=0 Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988044 4838 generic.go:334] "Generic (PLEG): container finished" podID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerID="d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d" exitCode=0 Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988052 4838 generic.go:334] "Generic (PLEG): container finished" podID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerID="c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2" exitCode=0 Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988060 4838 generic.go:334] "Generic (PLEG): container finished" podID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerID="7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682" exitCode=0 Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988068 4838 generic.go:334] "Generic (PLEG): container finished" podID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerID="6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572" exitCode=0 Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988075 4838 generic.go:334] "Generic (PLEG): container finished" podID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" containerID="047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d" exitCode=143 Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988103 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerDied","Data":"56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988127 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988146 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerDied","Data":"da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988167 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerDied","Data":"d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988188 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerDied","Data":"c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988206 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerDied","Data":"7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988223 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerDied","Data":"6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988231 4838 scope.go:117] "RemoveContainer" containerID="56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988239 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988255 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988266 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988276 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988286 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988296 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988307 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988316 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988326 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988340 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerDied","Data":"047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988355 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988367 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988378 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988387 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988398 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988437 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988448 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988458 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988468 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988478 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988492 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-66l9c" event={"ID":"9bc00b9c-6e31-4f8e-b4ba-44150281ed69","Type":"ContainerDied","Data":"9bb4c34ffb8e28c5369bc34e43e9b0bd2827da1a18ac470deb3887221060994a"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988508 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988521 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988531 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988540 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988549 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988558 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988566 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988575 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988584 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.988592 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.989942 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ndxhv_ddc2e893-5801-4e73-a5f6-9cc52f733f49/kube-multus/2.log" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.990886 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ndxhv_ddc2e893-5801-4e73-a5f6-9cc52f733f49/kube-multus/1.log" Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.990924 4838 generic.go:334] "Generic (PLEG): container finished" podID="ddc2e893-5801-4e73-a5f6-9cc52f733f49" containerID="2ad4ae78e0719cb061a0ffafcdac325d6c137147b66ef29798b0a50c30341efc" exitCode=2 Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.990948 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-ndxhv" 
event={"ID":"ddc2e893-5801-4e73-a5f6-9cc52f733f49","Type":"ContainerDied","Data":"2ad4ae78e0719cb061a0ffafcdac325d6c137147b66ef29798b0a50c30341efc"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.990964 4838 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"94192e5199deb1455cbc4f14f600e68e7432dc6e1b246d174b7a4cc4b4359770"} Feb 02 11:04:31 crc kubenswrapper[4838]: I0202 11:04:31.991269 4838 scope.go:117] "RemoveContainer" containerID="2ad4ae78e0719cb061a0ffafcdac325d6c137147b66ef29798b0a50c30341efc" Feb 02 11:04:31 crc kubenswrapper[4838]: E0202 11:04:31.991499 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-ndxhv_openshift-multus(ddc2e893-5801-4e73-a5f6-9cc52f733f49)\"" pod="openshift-multus/multus-ndxhv" podUID="ddc2e893-5801-4e73-a5f6-9cc52f733f49" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.013352 4838 scope.go:117] "RemoveContainer" containerID="f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.034156 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-66l9c"] Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.036530 4838 scope.go:117] "RemoveContainer" containerID="da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.039831 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-66l9c"] Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.049413 4838 scope.go:117] "RemoveContainer" containerID="d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.060443 4838 scope.go:117] "RemoveContainer" containerID="c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.076083 4838 scope.go:117] "RemoveContainer" containerID="7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.092126 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.097010 4838 scope.go:117] "RemoveContainer" containerID="6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.119302 4838 scope.go:117] "RemoveContainer" containerID="188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f" Feb 02 11:04:32 crc kubenswrapper[4838]: W0202 11:04:32.125856 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3681cf6d_4236_4114_8d62_c68052318719.slice/crio-4c05bd3e143ad2dfd3966839227f554915e039be3d414fcf54d887c3e0b7422c WatchSource:0}: Error finding container 4c05bd3e143ad2dfd3966839227f554915e039be3d414fcf54d887c3e0b7422c: Status 404 returned error can't find the container with id 4c05bd3e143ad2dfd3966839227f554915e039be3d414fcf54d887c3e0b7422c Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.132957 4838 scope.go:117] "RemoveContainer" containerID="047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.159237 4838 scope.go:117] "RemoveContainer" containerID="0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.181941 4838 scope.go:117] "RemoveContainer" containerID="56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d" Feb 02 11:04:32 crc kubenswrapper[4838]: E0202 11:04:32.182536 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d\": container with ID starting with 56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d not found: ID does not exist" containerID="56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.182600 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d"} err="failed to get container status \"56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d\": rpc error: code = NotFound desc = could not find container \"56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d\": container with ID starting with 56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.182655 4838 scope.go:117] "RemoveContainer" containerID="f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588" Feb 02 11:04:32 crc kubenswrapper[4838]: E0202 11:04:32.183312 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588\": container with ID starting with f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588 not found: ID does not exist" containerID="f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.183358 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588"} err="failed to get container status \"f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588\": rpc 
error: code = NotFound desc = could not find container \"f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588\": container with ID starting with f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588 not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.183395 4838 scope.go:117] "RemoveContainer" containerID="da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c" Feb 02 11:04:32 crc kubenswrapper[4838]: E0202 11:04:32.183729 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\": container with ID starting with da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c not found: ID does not exist" containerID="da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.183763 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c"} err="failed to get container status \"da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\": rpc error: code = NotFound desc = could not find container \"da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\": container with ID starting with da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.183783 4838 scope.go:117] "RemoveContainer" containerID="d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d" Feb 02 11:04:32 crc kubenswrapper[4838]: E0202 11:04:32.184083 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\": container with ID starting with d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d not found: ID does not exist" containerID="d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.184112 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d"} err="failed to get container status \"d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\": rpc error: code = NotFound desc = could not find container \"d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\": container with ID starting with d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.184131 4838 scope.go:117] "RemoveContainer" containerID="c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2" Feb 02 11:04:32 crc kubenswrapper[4838]: E0202 11:04:32.184454 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\": container with ID starting with c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2 not found: ID does not exist" containerID="c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.184486 4838 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2"} err="failed to get container status \"c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\": rpc error: code = NotFound desc = could not find container \"c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\": container with ID starting with c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2 not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.184503 4838 scope.go:117] "RemoveContainer" containerID="7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682" Feb 02 11:04:32 crc kubenswrapper[4838]: E0202 11:04:32.185034 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\": container with ID starting with 7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682 not found: ID does not exist" containerID="7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.185059 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682"} err="failed to get container status \"7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\": rpc error: code = NotFound desc = could not find container \"7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\": container with ID starting with 7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682 not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.185089 4838 scope.go:117] "RemoveContainer" containerID="6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572" Feb 02 11:04:32 crc kubenswrapper[4838]: E0202 11:04:32.185432 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\": container with ID starting with 6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572 not found: ID does not exist" containerID="6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.185484 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572"} err="failed to get container status \"6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\": rpc error: code = NotFound desc = could not find container \"6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\": container with ID starting with 6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572 not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.185519 4838 scope.go:117] "RemoveContainer" containerID="188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f" Feb 02 11:04:32 crc kubenswrapper[4838]: E0202 11:04:32.215145 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\": container with ID starting with 188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f not found: ID does not exist" 
containerID="188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.215234 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f"} err="failed to get container status \"188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\": rpc error: code = NotFound desc = could not find container \"188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\": container with ID starting with 188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.215296 4838 scope.go:117] "RemoveContainer" containerID="047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d" Feb 02 11:04:32 crc kubenswrapper[4838]: E0202 11:04:32.216030 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\": container with ID starting with 047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d not found: ID does not exist" containerID="047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.216078 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d"} err="failed to get container status \"047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\": rpc error: code = NotFound desc = could not find container \"047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\": container with ID starting with 047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.216115 4838 scope.go:117] "RemoveContainer" containerID="0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd" Feb 02 11:04:32 crc kubenswrapper[4838]: E0202 11:04:32.216529 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\": container with ID starting with 0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd not found: ID does not exist" containerID="0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.216552 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd"} err="failed to get container status \"0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\": rpc error: code = NotFound desc = could not find container \"0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\": container with ID starting with 0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.216568 4838 scope.go:117] "RemoveContainer" containerID="56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.216938 4838 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d"} err="failed to get container status \"56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d\": rpc error: code = NotFound desc = could not find container \"56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d\": container with ID starting with 56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.216980 4838 scope.go:117] "RemoveContainer" containerID="f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.217267 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588"} err="failed to get container status \"f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588\": rpc error: code = NotFound desc = could not find container \"f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588\": container with ID starting with f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588 not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.217288 4838 scope.go:117] "RemoveContainer" containerID="da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.217712 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c"} err="failed to get container status \"da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\": rpc error: code = NotFound desc = could not find container \"da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\": container with ID starting with da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.217733 4838 scope.go:117] "RemoveContainer" containerID="d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.218028 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d"} err="failed to get container status \"d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\": rpc error: code = NotFound desc = could not find container \"d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\": container with ID starting with d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.218061 4838 scope.go:117] "RemoveContainer" containerID="c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.218348 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2"} err="failed to get container status \"c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\": rpc error: code = NotFound desc = could not find container \"c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\": container with ID starting with c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2 not found: ID does not exist" Feb 
02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.218368 4838 scope.go:117] "RemoveContainer" containerID="7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.218677 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682"} err="failed to get container status \"7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\": rpc error: code = NotFound desc = could not find container \"7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\": container with ID starting with 7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682 not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.218713 4838 scope.go:117] "RemoveContainer" containerID="6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.219057 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572"} err="failed to get container status \"6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\": rpc error: code = NotFound desc = could not find container \"6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\": container with ID starting with 6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572 not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.219117 4838 scope.go:117] "RemoveContainer" containerID="188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.219495 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f"} err="failed to get container status \"188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\": rpc error: code = NotFound desc = could not find container \"188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\": container with ID starting with 188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.219580 4838 scope.go:117] "RemoveContainer" containerID="047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.220202 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d"} err="failed to get container status \"047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\": rpc error: code = NotFound desc = could not find container \"047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\": container with ID starting with 047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.220231 4838 scope.go:117] "RemoveContainer" containerID="0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.220509 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd"} err="failed to get container status 
\"0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\": rpc error: code = NotFound desc = could not find container \"0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\": container with ID starting with 0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.220543 4838 scope.go:117] "RemoveContainer" containerID="56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.220811 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d"} err="failed to get container status \"56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d\": rpc error: code = NotFound desc = could not find container \"56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d\": container with ID starting with 56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.220836 4838 scope.go:117] "RemoveContainer" containerID="f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.221140 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588"} err="failed to get container status \"f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588\": rpc error: code = NotFound desc = could not find container \"f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588\": container with ID starting with f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588 not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.221208 4838 scope.go:117] "RemoveContainer" containerID="da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.221551 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c"} err="failed to get container status \"da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\": rpc error: code = NotFound desc = could not find container \"da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\": container with ID starting with da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.221570 4838 scope.go:117] "RemoveContainer" containerID="d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.222157 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d"} err="failed to get container status \"d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\": rpc error: code = NotFound desc = could not find container \"d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\": container with ID starting with d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.222185 4838 scope.go:117] "RemoveContainer" 
containerID="c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.222454 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2"} err="failed to get container status \"c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\": rpc error: code = NotFound desc = could not find container \"c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\": container with ID starting with c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2 not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.222496 4838 scope.go:117] "RemoveContainer" containerID="7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.222791 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682"} err="failed to get container status \"7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\": rpc error: code = NotFound desc = could not find container \"7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\": container with ID starting with 7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682 not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.222810 4838 scope.go:117] "RemoveContainer" containerID="6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.223067 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572"} err="failed to get container status \"6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\": rpc error: code = NotFound desc = could not find container \"6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\": container with ID starting with 6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572 not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.223097 4838 scope.go:117] "RemoveContainer" containerID="188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.223519 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f"} err="failed to get container status \"188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\": rpc error: code = NotFound desc = could not find container \"188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\": container with ID starting with 188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.223546 4838 scope.go:117] "RemoveContainer" containerID="047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.224503 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d"} err="failed to get container status \"047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\": rpc error: code = NotFound desc = could not find 
container \"047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\": container with ID starting with 047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.224524 4838 scope.go:117] "RemoveContainer" containerID="0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.224756 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd"} err="failed to get container status \"0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\": rpc error: code = NotFound desc = could not find container \"0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\": container with ID starting with 0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.224778 4838 scope.go:117] "RemoveContainer" containerID="56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.225019 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d"} err="failed to get container status \"56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d\": rpc error: code = NotFound desc = could not find container \"56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d\": container with ID starting with 56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.225041 4838 scope.go:117] "RemoveContainer" containerID="f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.225479 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588"} err="failed to get container status \"f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588\": rpc error: code = NotFound desc = could not find container \"f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588\": container with ID starting with f66f8d11b13d956aceba18a634e6b51c0c58339e196b017ef4b513805af7d588 not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.225513 4838 scope.go:117] "RemoveContainer" containerID="da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.225778 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c"} err="failed to get container status \"da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\": rpc error: code = NotFound desc = could not find container \"da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c\": container with ID starting with da55e74a6e0766b5aab7fcddfd095bf5e7fbd72ec618082734c2e4603d072c3c not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.225801 4838 scope.go:117] "RemoveContainer" containerID="d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.226040 4838 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d"} err="failed to get container status \"d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\": rpc error: code = NotFound desc = could not find container \"d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d\": container with ID starting with d09a67bef283439ccd0d9738300cd319566b52dc64619da5ce30b2bfd0ee4c5d not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.226068 4838 scope.go:117] "RemoveContainer" containerID="c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.226313 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2"} err="failed to get container status \"c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\": rpc error: code = NotFound desc = could not find container \"c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2\": container with ID starting with c3490951bcb1b4170258668ae380b8e665b0feb8d6e9fb74898f39b749ac5db2 not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.226362 4838 scope.go:117] "RemoveContainer" containerID="7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.226598 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682"} err="failed to get container status \"7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\": rpc error: code = NotFound desc = could not find container \"7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682\": container with ID starting with 7cb2267f8b9af75d6629e6d462662052ecb9d4a98322cf41c2da4375f2895682 not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.226641 4838 scope.go:117] "RemoveContainer" containerID="6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.226982 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572"} err="failed to get container status \"6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\": rpc error: code = NotFound desc = could not find container \"6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572\": container with ID starting with 6ffca9c2d86f97b1183b5e767cb5f82890ff87b27081d86385f3a4127dbb1572 not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.227008 4838 scope.go:117] "RemoveContainer" containerID="188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.227366 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f"} err="failed to get container status \"188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\": rpc error: code = NotFound desc = could not find container \"188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f\": container with ID starting with 
188c14ef07c9d242759763ba36ddc6cf1cc3f48c6a43604c2f7b0cd5539bfc3f not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.227383 4838 scope.go:117] "RemoveContainer" containerID="047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.227575 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d"} err="failed to get container status \"047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\": rpc error: code = NotFound desc = could not find container \"047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d\": container with ID starting with 047d0e02e8d92882dc79c1f064ac5b80a912176dd3ce68d7675b60cb9dbc423d not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.227598 4838 scope.go:117] "RemoveContainer" containerID="0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.227886 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd"} err="failed to get container status \"0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\": rpc error: code = NotFound desc = could not find container \"0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd\": container with ID starting with 0cd735c6bc6a4da0cc08f1dd83aab9528ceb8706e11055d97551480bc74fa5cd not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.227905 4838 scope.go:117] "RemoveContainer" containerID="56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.228117 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d"} err="failed to get container status \"56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d\": rpc error: code = NotFound desc = could not find container \"56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d\": container with ID starting with 56cb786853f1f165bfbc6d699fc7e0cfca9f81741a90fa9628951f9b4c2c649d not found: ID does not exist" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.515011 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9bc00b9c-6e31-4f8e-b4ba-44150281ed69" path="/var/lib/kubelet/pods/9bc00b9c-6e31-4f8e-b4ba-44150281ed69/volumes" Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.997897 4838 generic.go:334] "Generic (PLEG): container finished" podID="3681cf6d-4236-4114-8d62-c68052318719" containerID="44a37e10eda08e7cb50f676a34b82cb1b7e32ad35e6ac8c22b3c6c3dc1dfad60" exitCode=0 Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.998010 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" event={"ID":"3681cf6d-4236-4114-8d62-c68052318719","Type":"ContainerDied","Data":"44a37e10eda08e7cb50f676a34b82cb1b7e32ad35e6ac8c22b3c6c3dc1dfad60"} Feb 02 11:04:32 crc kubenswrapper[4838]: I0202 11:04:32.998059 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" event={"ID":"3681cf6d-4236-4114-8d62-c68052318719","Type":"ContainerStarted","Data":"4c05bd3e143ad2dfd3966839227f554915e039be3d414fcf54d887c3e0b7422c"} 
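The NotFound failures above are gRPC status errors relayed from the CRI runtime (CRI-O here): the kubelet asks the runtime to remove a container whose record is already gone, and the runtime answers codes.NotFound, which is harmless for cleanup. A minimal sketch of checking for that condition with the standard grpc-go status package (isAlreadyGone is an illustrative helper, not kubelet code):

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// isAlreadyGone reports whether err carries the gRPC NotFound code,
// the condition logged above when a container record was already removed.
func isAlreadyGone(err error) bool {
	return status.Code(err) == codes.NotFound
}

func main() {
	// Simulate the runtime reply seen in the log entries above.
	err := status.Error(codes.NotFound, "could not find container")
	if isAlreadyGone(err) {
		fmt.Println("container already removed; treat deletion as complete")
	} else if err != nil {
		fmt.Println("real failure:", err)
	}
}

Treating NotFound as success keeps container deletion idempotent, which is consistent with these entries being logged at info (I) level rather than aborting cleanup.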
Feb 02 11:04:34 crc kubenswrapper[4838]: I0202 11:04:34.010308 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" event={"ID":"3681cf6d-4236-4114-8d62-c68052318719","Type":"ContainerStarted","Data":"38b7243f9f8a423c5505a3155424bb772aa99a69f2605eb231b683304606d218"} Feb 02 11:04:34 crc kubenswrapper[4838]: I0202 11:04:34.010964 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" event={"ID":"3681cf6d-4236-4114-8d62-c68052318719","Type":"ContainerStarted","Data":"59325eb8f3f046e4944f21df13139b2a44825f5bec769fb275095aaf12995b97"} Feb 02 11:04:34 crc kubenswrapper[4838]: I0202 11:04:34.010976 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" event={"ID":"3681cf6d-4236-4114-8d62-c68052318719","Type":"ContainerStarted","Data":"d08d56910e142aa0c80ff59ea0611c31129ee48ceda70e7a7b9831f1434863d5"} Feb 02 11:04:34 crc kubenswrapper[4838]: I0202 11:04:34.010986 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" event={"ID":"3681cf6d-4236-4114-8d62-c68052318719","Type":"ContainerStarted","Data":"eb22f27634e01d47726d3a3d9bdd19770ec4c7bdbd77ded9c3a7e2f029c92133"} Feb 02 11:04:35 crc kubenswrapper[4838]: I0202 11:04:35.025337 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" event={"ID":"3681cf6d-4236-4114-8d62-c68052318719","Type":"ContainerStarted","Data":"f6642cfe41ad6ad003b7554467d6b30c54e4d3c231cd0f678ab4bac0dd25c25b"} Feb 02 11:04:35 crc kubenswrapper[4838]: I0202 11:04:35.025414 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" event={"ID":"3681cf6d-4236-4114-8d62-c68052318719","Type":"ContainerStarted","Data":"23310747f24cef0d9bb2308407e423002ce9c68ce9fd9f0db1f1a1e641521a4a"} Feb 02 11:04:36 crc kubenswrapper[4838]: I0202 11:04:36.086154 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-687f57d79b-tvmg6" Feb 02 11:04:37 crc kubenswrapper[4838]: I0202 11:04:37.051400 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" event={"ID":"3681cf6d-4236-4114-8d62-c68052318719","Type":"ContainerStarted","Data":"35ecb288a7a549bb3adbdfd1b10c745a9e47a4554f16a2be01cc35781716480e"} Feb 02 11:04:39 crc kubenswrapper[4838]: I0202 11:04:39.077396 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" event={"ID":"3681cf6d-4236-4114-8d62-c68052318719","Type":"ContainerStarted","Data":"0a0c65de05555c5e439e7615426f94e623b41dd92e65a3ae680994d5f221c938"} Feb 02 11:04:39 crc kubenswrapper[4838]: I0202 11:04:39.078182 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:39 crc kubenswrapper[4838]: I0202 11:04:39.078386 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:39 crc kubenswrapper[4838]: I0202 11:04:39.078465 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:39 crc kubenswrapper[4838]: I0202 11:04:39.107922 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" podStartSLOduration=8.107904996 podStartE2EDuration="8.107904996s" 
podCreationTimestamp="2026-02-02 11:04:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:04:39.106250522 +0000 UTC m=+673.443351570" watchObservedRunningTime="2026-02-02 11:04:39.107904996 +0000 UTC m=+673.445006024" Feb 02 11:04:39 crc kubenswrapper[4838]: I0202 11:04:39.121692 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:39 crc kubenswrapper[4838]: I0202 11:04:39.122689 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:04:46 crc kubenswrapper[4838]: I0202 11:04:46.511566 4838 scope.go:117] "RemoveContainer" containerID="2ad4ae78e0719cb061a0ffafcdac325d6c137147b66ef29798b0a50c30341efc" Feb 02 11:04:46 crc kubenswrapper[4838]: E0202 11:04:46.512281 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-ndxhv_openshift-multus(ddc2e893-5801-4e73-a5f6-9cc52f733f49)\"" pod="openshift-multus/multus-ndxhv" podUID="ddc2e893-5801-4e73-a5f6-9cc52f733f49" Feb 02 11:04:59 crc kubenswrapper[4838]: I0202 11:04:59.506315 4838 scope.go:117] "RemoveContainer" containerID="2ad4ae78e0719cb061a0ffafcdac325d6c137147b66ef29798b0a50c30341efc" Feb 02 11:05:00 crc kubenswrapper[4838]: I0202 11:05:00.219829 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ndxhv_ddc2e893-5801-4e73-a5f6-9cc52f733f49/kube-multus/2.log" Feb 02 11:05:00 crc kubenswrapper[4838]: I0202 11:05:00.220990 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ndxhv_ddc2e893-5801-4e73-a5f6-9cc52f733f49/kube-multus/1.log" Feb 02 11:05:00 crc kubenswrapper[4838]: I0202 11:05:00.221104 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-ndxhv" event={"ID":"ddc2e893-5801-4e73-a5f6-9cc52f733f49","Type":"ContainerStarted","Data":"ee8fb6e8c911805e3861f423d03fa7b89c7dc4e53e7587a08d8bb439c192ec5f"} Feb 02 11:05:02 crc kubenswrapper[4838]: I0202 11:05:02.167110 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-gttjk" Feb 02 11:05:14 crc kubenswrapper[4838]: I0202 11:05:14.109072 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl"] Feb 02 11:05:14 crc kubenswrapper[4838]: I0202 11:05:14.111866 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl" Feb 02 11:05:14 crc kubenswrapper[4838]: I0202 11:05:14.114036 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Feb 02 11:05:14 crc kubenswrapper[4838]: I0202 11:05:14.123285 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl"] Feb 02 11:05:14 crc kubenswrapper[4838]: I0202 11:05:14.158255 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zs788\" (UniqueName: \"kubernetes.io/projected/a250321d-30c9-426a-b638-90dd5e9c036d-kube-api-access-zs788\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl\" (UID: \"a250321d-30c9-426a-b638-90dd5e9c036d\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl" Feb 02 11:05:14 crc kubenswrapper[4838]: I0202 11:05:14.158307 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a250321d-30c9-426a-b638-90dd5e9c036d-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl\" (UID: \"a250321d-30c9-426a-b638-90dd5e9c036d\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl" Feb 02 11:05:14 crc kubenswrapper[4838]: I0202 11:05:14.158331 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a250321d-30c9-426a-b638-90dd5e9c036d-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl\" (UID: \"a250321d-30c9-426a-b638-90dd5e9c036d\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl" Feb 02 11:05:14 crc kubenswrapper[4838]: I0202 11:05:14.259223 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zs788\" (UniqueName: \"kubernetes.io/projected/a250321d-30c9-426a-b638-90dd5e9c036d-kube-api-access-zs788\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl\" (UID: \"a250321d-30c9-426a-b638-90dd5e9c036d\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl" Feb 02 11:05:14 crc kubenswrapper[4838]: I0202 11:05:14.259297 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a250321d-30c9-426a-b638-90dd5e9c036d-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl\" (UID: \"a250321d-30c9-426a-b638-90dd5e9c036d\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl" Feb 02 11:05:14 crc kubenswrapper[4838]: I0202 11:05:14.259328 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a250321d-30c9-426a-b638-90dd5e9c036d-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl\" (UID: \"a250321d-30c9-426a-b638-90dd5e9c036d\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl" Feb 02 11:05:14 crc kubenswrapper[4838]: I0202 11:05:14.259998 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/a250321d-30c9-426a-b638-90dd5e9c036d-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl\" (UID: \"a250321d-30c9-426a-b638-90dd5e9c036d\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl" Feb 02 11:05:14 crc kubenswrapper[4838]: I0202 11:05:14.260026 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a250321d-30c9-426a-b638-90dd5e9c036d-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl\" (UID: \"a250321d-30c9-426a-b638-90dd5e9c036d\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl" Feb 02 11:05:14 crc kubenswrapper[4838]: I0202 11:05:14.285800 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zs788\" (UniqueName: \"kubernetes.io/projected/a250321d-30c9-426a-b638-90dd5e9c036d-kube-api-access-zs788\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl\" (UID: \"a250321d-30c9-426a-b638-90dd5e9c036d\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl" Feb 02 11:05:14 crc kubenswrapper[4838]: I0202 11:05:14.431268 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl" Feb 02 11:05:14 crc kubenswrapper[4838]: I0202 11:05:14.894596 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl"] Feb 02 11:05:14 crc kubenswrapper[4838]: W0202 11:05:14.905719 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda250321d_30c9_426a_b638_90dd5e9c036d.slice/crio-686df569f0528cfa8bd0ab2daacbc0dba93d93f5f9a224a8358decbe96bdeaa3 WatchSource:0}: Error finding container 686df569f0528cfa8bd0ab2daacbc0dba93d93f5f9a224a8358decbe96bdeaa3: Status 404 returned error can't find the container with id 686df569f0528cfa8bd0ab2daacbc0dba93d93f5f9a224a8358decbe96bdeaa3 Feb 02 11:05:15 crc kubenswrapper[4838]: I0202 11:05:15.328413 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl" event={"ID":"a250321d-30c9-426a-b638-90dd5e9c036d","Type":"ContainerStarted","Data":"d8c8b25be6bcc231cea6d7c445d19319e5f1a6255d3154d264e6cfdc082136b4"} Feb 02 11:05:15 crc kubenswrapper[4838]: I0202 11:05:15.328973 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl" event={"ID":"a250321d-30c9-426a-b638-90dd5e9c036d","Type":"ContainerStarted","Data":"686df569f0528cfa8bd0ab2daacbc0dba93d93f5f9a224a8358decbe96bdeaa3"} Feb 02 11:05:15 crc kubenswrapper[4838]: E0202 11:05:15.782195 4838 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda250321d_30c9_426a_b638_90dd5e9c036d.slice/crio-conmon-d8c8b25be6bcc231cea6d7c445d19319e5f1a6255d3154d264e6cfdc082136b4.scope\": RecentStats: unable to find data in memory cache]" Feb 02 11:05:16 crc kubenswrapper[4838]: I0202 11:05:16.335223 4838 generic.go:334] "Generic (PLEG): container finished" podID="a250321d-30c9-426a-b638-90dd5e9c036d" 
containerID="d8c8b25be6bcc231cea6d7c445d19319e5f1a6255d3154d264e6cfdc082136b4" exitCode=0 Feb 02 11:05:16 crc kubenswrapper[4838]: I0202 11:05:16.335280 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl" event={"ID":"a250321d-30c9-426a-b638-90dd5e9c036d","Type":"ContainerDied","Data":"d8c8b25be6bcc231cea6d7c445d19319e5f1a6255d3154d264e6cfdc082136b4"} Feb 02 11:05:18 crc kubenswrapper[4838]: I0202 11:05:18.350347 4838 generic.go:334] "Generic (PLEG): container finished" podID="a250321d-30c9-426a-b638-90dd5e9c036d" containerID="1f6edb317e2a64cdea5e5317a3e92fee709241478fbb2331491c4bfb154c8a6f" exitCode=0 Feb 02 11:05:18 crc kubenswrapper[4838]: I0202 11:05:18.350453 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl" event={"ID":"a250321d-30c9-426a-b638-90dd5e9c036d","Type":"ContainerDied","Data":"1f6edb317e2a64cdea5e5317a3e92fee709241478fbb2331491c4bfb154c8a6f"} Feb 02 11:05:19 crc kubenswrapper[4838]: I0202 11:05:19.360978 4838 generic.go:334] "Generic (PLEG): container finished" podID="a250321d-30c9-426a-b638-90dd5e9c036d" containerID="15a56702f9e9496e5243bae251d26e1d4b64c4bc404e61f136bd655aedb148d0" exitCode=0 Feb 02 11:05:19 crc kubenswrapper[4838]: I0202 11:05:19.361052 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl" event={"ID":"a250321d-30c9-426a-b638-90dd5e9c036d","Type":"ContainerDied","Data":"15a56702f9e9496e5243bae251d26e1d4b64c4bc404e61f136bd655aedb148d0"} Feb 02 11:05:20 crc kubenswrapper[4838]: I0202 11:05:20.627247 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl" Feb 02 11:05:20 crc kubenswrapper[4838]: I0202 11:05:20.821463 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zs788\" (UniqueName: \"kubernetes.io/projected/a250321d-30c9-426a-b638-90dd5e9c036d-kube-api-access-zs788\") pod \"a250321d-30c9-426a-b638-90dd5e9c036d\" (UID: \"a250321d-30c9-426a-b638-90dd5e9c036d\") " Feb 02 11:05:20 crc kubenswrapper[4838]: I0202 11:05:20.821610 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a250321d-30c9-426a-b638-90dd5e9c036d-bundle\") pod \"a250321d-30c9-426a-b638-90dd5e9c036d\" (UID: \"a250321d-30c9-426a-b638-90dd5e9c036d\") " Feb 02 11:05:20 crc kubenswrapper[4838]: I0202 11:05:20.821686 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a250321d-30c9-426a-b638-90dd5e9c036d-util\") pod \"a250321d-30c9-426a-b638-90dd5e9c036d\" (UID: \"a250321d-30c9-426a-b638-90dd5e9c036d\") " Feb 02 11:05:20 crc kubenswrapper[4838]: I0202 11:05:20.822212 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a250321d-30c9-426a-b638-90dd5e9c036d-bundle" (OuterVolumeSpecName: "bundle") pod "a250321d-30c9-426a-b638-90dd5e9c036d" (UID: "a250321d-30c9-426a-b638-90dd5e9c036d"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:05:20 crc kubenswrapper[4838]: I0202 11:05:20.830102 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a250321d-30c9-426a-b638-90dd5e9c036d-kube-api-access-zs788" (OuterVolumeSpecName: "kube-api-access-zs788") pod "a250321d-30c9-426a-b638-90dd5e9c036d" (UID: "a250321d-30c9-426a-b638-90dd5e9c036d"). InnerVolumeSpecName "kube-api-access-zs788". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:05:20 crc kubenswrapper[4838]: I0202 11:05:20.842133 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a250321d-30c9-426a-b638-90dd5e9c036d-util" (OuterVolumeSpecName: "util") pod "a250321d-30c9-426a-b638-90dd5e9c036d" (UID: "a250321d-30c9-426a-b638-90dd5e9c036d"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:05:20 crc kubenswrapper[4838]: I0202 11:05:20.923282 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zs788\" (UniqueName: \"kubernetes.io/projected/a250321d-30c9-426a-b638-90dd5e9c036d-kube-api-access-zs788\") on node \"crc\" DevicePath \"\"" Feb 02 11:05:20 crc kubenswrapper[4838]: I0202 11:05:20.923336 4838 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a250321d-30c9-426a-b638-90dd5e9c036d-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:05:20 crc kubenswrapper[4838]: I0202 11:05:20.923354 4838 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a250321d-30c9-426a-b638-90dd5e9c036d-util\") on node \"crc\" DevicePath \"\"" Feb 02 11:05:21 crc kubenswrapper[4838]: I0202 11:05:21.380909 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl" event={"ID":"a250321d-30c9-426a-b638-90dd5e9c036d","Type":"ContainerDied","Data":"686df569f0528cfa8bd0ab2daacbc0dba93d93f5f9a224a8358decbe96bdeaa3"} Feb 02 11:05:21 crc kubenswrapper[4838]: I0202 11:05:21.381011 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="686df569f0528cfa8bd0ab2daacbc0dba93d93f5f9a224a8358decbe96bdeaa3" Feb 02 11:05:21 crc kubenswrapper[4838]: I0202 11:05:21.381021 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl" Feb 02 11:05:22 crc kubenswrapper[4838]: I0202 11:05:22.569084 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-bggr6"] Feb 02 11:05:22 crc kubenswrapper[4838]: E0202 11:05:22.569675 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a250321d-30c9-426a-b638-90dd5e9c036d" containerName="util" Feb 02 11:05:22 crc kubenswrapper[4838]: I0202 11:05:22.569689 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="a250321d-30c9-426a-b638-90dd5e9c036d" containerName="util" Feb 02 11:05:22 crc kubenswrapper[4838]: E0202 11:05:22.569701 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a250321d-30c9-426a-b638-90dd5e9c036d" containerName="pull" Feb 02 11:05:22 crc kubenswrapper[4838]: I0202 11:05:22.569708 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="a250321d-30c9-426a-b638-90dd5e9c036d" containerName="pull" Feb 02 11:05:22 crc kubenswrapper[4838]: E0202 11:05:22.569719 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a250321d-30c9-426a-b638-90dd5e9c036d" containerName="extract" Feb 02 11:05:22 crc kubenswrapper[4838]: I0202 11:05:22.569727 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="a250321d-30c9-426a-b638-90dd5e9c036d" containerName="extract" Feb 02 11:05:22 crc kubenswrapper[4838]: I0202 11:05:22.569841 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="a250321d-30c9-426a-b638-90dd5e9c036d" containerName="extract" Feb 02 11:05:22 crc kubenswrapper[4838]: I0202 11:05:22.570310 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-bggr6" Feb 02 11:05:22 crc kubenswrapper[4838]: I0202 11:05:22.577564 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Feb 02 11:05:22 crc kubenswrapper[4838]: I0202 11:05:22.581174 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Feb 02 11:05:22 crc kubenswrapper[4838]: I0202 11:05:22.585543 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-bggr6"] Feb 02 11:05:22 crc kubenswrapper[4838]: I0202 11:05:22.587240 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-n2vlq" Feb 02 11:05:22 crc kubenswrapper[4838]: I0202 11:05:22.746715 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvmdb\" (UniqueName: \"kubernetes.io/projected/eac05e66-32ef-40ab-833b-ffdb87e12159-kube-api-access-dvmdb\") pod \"nmstate-operator-646758c888-bggr6\" (UID: \"eac05e66-32ef-40ab-833b-ffdb87e12159\") " pod="openshift-nmstate/nmstate-operator-646758c888-bggr6" Feb 02 11:05:22 crc kubenswrapper[4838]: I0202 11:05:22.848343 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvmdb\" (UniqueName: \"kubernetes.io/projected/eac05e66-32ef-40ab-833b-ffdb87e12159-kube-api-access-dvmdb\") pod \"nmstate-operator-646758c888-bggr6\" (UID: \"eac05e66-32ef-40ab-833b-ffdb87e12159\") " pod="openshift-nmstate/nmstate-operator-646758c888-bggr6" Feb 02 11:05:22 crc kubenswrapper[4838]: I0202 11:05:22.872830 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvmdb\" 
(UniqueName: \"kubernetes.io/projected/eac05e66-32ef-40ab-833b-ffdb87e12159-kube-api-access-dvmdb\") pod \"nmstate-operator-646758c888-bggr6\" (UID: \"eac05e66-32ef-40ab-833b-ffdb87e12159\") " pod="openshift-nmstate/nmstate-operator-646758c888-bggr6" Feb 02 11:05:22 crc kubenswrapper[4838]: I0202 11:05:22.890403 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-bggr6" Feb 02 11:05:23 crc kubenswrapper[4838]: I0202 11:05:23.131019 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-bggr6"] Feb 02 11:05:23 crc kubenswrapper[4838]: W0202 11:05:23.139157 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeac05e66_32ef_40ab_833b_ffdb87e12159.slice/crio-eff31030ce2703a7051266021e5dd708bf2e1b25fc7dcd89d15c596733bd9be1 WatchSource:0}: Error finding container eff31030ce2703a7051266021e5dd708bf2e1b25fc7dcd89d15c596733bd9be1: Status 404 returned error can't find the container with id eff31030ce2703a7051266021e5dd708bf2e1b25fc7dcd89d15c596733bd9be1 Feb 02 11:05:23 crc kubenswrapper[4838]: I0202 11:05:23.392397 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-bggr6" event={"ID":"eac05e66-32ef-40ab-833b-ffdb87e12159","Type":"ContainerStarted","Data":"eff31030ce2703a7051266021e5dd708bf2e1b25fc7dcd89d15c596733bd9be1"} Feb 02 11:05:26 crc kubenswrapper[4838]: I0202 11:05:26.414137 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-bggr6" event={"ID":"eac05e66-32ef-40ab-833b-ffdb87e12159","Type":"ContainerStarted","Data":"3c162a14e05388446a7c0651635347bf5cef14d524ecbbe4c8448915b43e9d6f"} Feb 02 11:05:26 crc kubenswrapper[4838]: I0202 11:05:26.441443 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-646758c888-bggr6" podStartSLOduration=2.069557806 podStartE2EDuration="4.441419716s" podCreationTimestamp="2026-02-02 11:05:22 +0000 UTC" firstStartedPulling="2026-02-02 11:05:23.142599589 +0000 UTC m=+717.479700617" lastFinishedPulling="2026-02-02 11:05:25.514461479 +0000 UTC m=+719.851562527" observedRunningTime="2026-02-02 11:05:26.435375275 +0000 UTC m=+720.772476343" watchObservedRunningTime="2026-02-02 11:05:26.441419716 +0000 UTC m=+720.778520744" Feb 02 11:05:26 crc kubenswrapper[4838]: I0202 11:05:26.890178 4838 scope.go:117] "RemoveContainer" containerID="94192e5199deb1455cbc4f14f600e68e7432dc6e1b246d174b7a4cc4b4359770" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.421534 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ndxhv_ddc2e893-5801-4e73-a5f6-9cc52f733f49/kube-multus/2.log" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.457541 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-q9xb2"] Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.458307 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-q9xb2" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.472816 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-q9xb2"] Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.476583 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-nwk6k"] Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.477259 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-nwk6k" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.478042 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-qrxx5" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.479525 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.494848 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-nwk6k"] Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.504880 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-ch2vl"] Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.505586 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-ch2vl" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.602791 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-q98cv"] Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.603575 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-q98cv" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.607552 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-ks8hw" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.607798 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.608267 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.609257 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6mpr\" (UniqueName: \"kubernetes.io/projected/539fff34-8b11-42d9-b32f-4c1cab281cf5-kube-api-access-c6mpr\") pod \"nmstate-handler-ch2vl\" (UID: \"539fff34-8b11-42d9-b32f-4c1cab281cf5\") " pod="openshift-nmstate/nmstate-handler-ch2vl" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.609296 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/4cbd203f-2073-4bee-8234-da99cf46562b-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-q98cv\" (UID: \"4cbd203f-2073-4bee-8234-da99cf46562b\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-q98cv" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.609426 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ksvb\" (UniqueName: \"kubernetes.io/projected/c3132ec2-5218-4b6f-8e19-dfce93103b19-kube-api-access-9ksvb\") pod \"nmstate-metrics-54757c584b-q9xb2\" (UID: \"c3132ec2-5218-4b6f-8e19-dfce93103b19\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-q9xb2" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.609463 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/b94f73b7-7c9a-4c88-9180-76861894189e-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-nwk6k\" (UID: \"b94f73b7-7c9a-4c88-9180-76861894189e\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-nwk6k" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.609602 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/4cbd203f-2073-4bee-8234-da99cf46562b-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-q98cv\" (UID: \"4cbd203f-2073-4bee-8234-da99cf46562b\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-q98cv" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.609808 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/539fff34-8b11-42d9-b32f-4c1cab281cf5-nmstate-lock\") pod \"nmstate-handler-ch2vl\" (UID: \"539fff34-8b11-42d9-b32f-4c1cab281cf5\") " pod="openshift-nmstate/nmstate-handler-ch2vl" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.609883 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sfd7j\" (UniqueName: \"kubernetes.io/projected/b94f73b7-7c9a-4c88-9180-76861894189e-kube-api-access-sfd7j\") pod \"nmstate-webhook-8474b5b9d8-nwk6k\" (UID: \"b94f73b7-7c9a-4c88-9180-76861894189e\") " 
pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-nwk6k" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.609926 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzhhq\" (UniqueName: \"kubernetes.io/projected/4cbd203f-2073-4bee-8234-da99cf46562b-kube-api-access-mzhhq\") pod \"nmstate-console-plugin-7754f76f8b-q98cv\" (UID: \"4cbd203f-2073-4bee-8234-da99cf46562b\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-q98cv" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.609952 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/539fff34-8b11-42d9-b32f-4c1cab281cf5-dbus-socket\") pod \"nmstate-handler-ch2vl\" (UID: \"539fff34-8b11-42d9-b32f-4c1cab281cf5\") " pod="openshift-nmstate/nmstate-handler-ch2vl" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.609975 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/539fff34-8b11-42d9-b32f-4c1cab281cf5-ovs-socket\") pod \"nmstate-handler-ch2vl\" (UID: \"539fff34-8b11-42d9-b32f-4c1cab281cf5\") " pod="openshift-nmstate/nmstate-handler-ch2vl" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.613697 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-q98cv"] Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.711187 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzhhq\" (UniqueName: \"kubernetes.io/projected/4cbd203f-2073-4bee-8234-da99cf46562b-kube-api-access-mzhhq\") pod \"nmstate-console-plugin-7754f76f8b-q98cv\" (UID: \"4cbd203f-2073-4bee-8234-da99cf46562b\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-q98cv" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.711263 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/539fff34-8b11-42d9-b32f-4c1cab281cf5-dbus-socket\") pod \"nmstate-handler-ch2vl\" (UID: \"539fff34-8b11-42d9-b32f-4c1cab281cf5\") " pod="openshift-nmstate/nmstate-handler-ch2vl" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.711294 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/539fff34-8b11-42d9-b32f-4c1cab281cf5-ovs-socket\") pod \"nmstate-handler-ch2vl\" (UID: \"539fff34-8b11-42d9-b32f-4c1cab281cf5\") " pod="openshift-nmstate/nmstate-handler-ch2vl" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.711326 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6mpr\" (UniqueName: \"kubernetes.io/projected/539fff34-8b11-42d9-b32f-4c1cab281cf5-kube-api-access-c6mpr\") pod \"nmstate-handler-ch2vl\" (UID: \"539fff34-8b11-42d9-b32f-4c1cab281cf5\") " pod="openshift-nmstate/nmstate-handler-ch2vl" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.711356 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/4cbd203f-2073-4bee-8234-da99cf46562b-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-q98cv\" (UID: \"4cbd203f-2073-4bee-8234-da99cf46562b\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-q98cv" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.711424 4838 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ksvb\" (UniqueName: \"kubernetes.io/projected/c3132ec2-5218-4b6f-8e19-dfce93103b19-kube-api-access-9ksvb\") pod \"nmstate-metrics-54757c584b-q9xb2\" (UID: \"c3132ec2-5218-4b6f-8e19-dfce93103b19\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-q9xb2" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.711454 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/b94f73b7-7c9a-4c88-9180-76861894189e-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-nwk6k\" (UID: \"b94f73b7-7c9a-4c88-9180-76861894189e\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-nwk6k" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.711510 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/4cbd203f-2073-4bee-8234-da99cf46562b-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-q98cv\" (UID: \"4cbd203f-2073-4bee-8234-da99cf46562b\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-q98cv" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.711563 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/539fff34-8b11-42d9-b32f-4c1cab281cf5-nmstate-lock\") pod \"nmstate-handler-ch2vl\" (UID: \"539fff34-8b11-42d9-b32f-4c1cab281cf5\") " pod="openshift-nmstate/nmstate-handler-ch2vl" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.711605 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sfd7j\" (UniqueName: \"kubernetes.io/projected/b94f73b7-7c9a-4c88-9180-76861894189e-kube-api-access-sfd7j\") pod \"nmstate-webhook-8474b5b9d8-nwk6k\" (UID: \"b94f73b7-7c9a-4c88-9180-76861894189e\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-nwk6k" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.711874 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/539fff34-8b11-42d9-b32f-4c1cab281cf5-dbus-socket\") pod \"nmstate-handler-ch2vl\" (UID: \"539fff34-8b11-42d9-b32f-4c1cab281cf5\") " pod="openshift-nmstate/nmstate-handler-ch2vl" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.711921 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/539fff34-8b11-42d9-b32f-4c1cab281cf5-ovs-socket\") pod \"nmstate-handler-ch2vl\" (UID: \"539fff34-8b11-42d9-b32f-4c1cab281cf5\") " pod="openshift-nmstate/nmstate-handler-ch2vl" Feb 02 11:05:27 crc kubenswrapper[4838]: E0202 11:05:27.711980 4838 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Feb 02 11:05:27 crc kubenswrapper[4838]: E0202 11:05:27.712018 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b94f73b7-7c9a-4c88-9180-76861894189e-tls-key-pair podName:b94f73b7-7c9a-4c88-9180-76861894189e nodeName:}" failed. No retries permitted until 2026-02-02 11:05:28.212003265 +0000 UTC m=+722.549104293 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/b94f73b7-7c9a-4c88-9180-76861894189e-tls-key-pair") pod "nmstate-webhook-8474b5b9d8-nwk6k" (UID: "b94f73b7-7c9a-4c88-9180-76861894189e") : secret "openshift-nmstate-webhook" not found Feb 02 11:05:27 crc kubenswrapper[4838]: E0202 11:05:27.712114 4838 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Feb 02 11:05:27 crc kubenswrapper[4838]: E0202 11:05:27.712182 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4cbd203f-2073-4bee-8234-da99cf46562b-plugin-serving-cert podName:4cbd203f-2073-4bee-8234-da99cf46562b nodeName:}" failed. No retries permitted until 2026-02-02 11:05:28.21215711 +0000 UTC m=+722.549258258 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/4cbd203f-2073-4bee-8234-da99cf46562b-plugin-serving-cert") pod "nmstate-console-plugin-7754f76f8b-q98cv" (UID: "4cbd203f-2073-4bee-8234-da99cf46562b") : secret "plugin-serving-cert" not found Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.712238 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/539fff34-8b11-42d9-b32f-4c1cab281cf5-nmstate-lock\") pod \"nmstate-handler-ch2vl\" (UID: \"539fff34-8b11-42d9-b32f-4c1cab281cf5\") " pod="openshift-nmstate/nmstate-handler-ch2vl" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.712674 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/4cbd203f-2073-4bee-8234-da99cf46562b-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-q98cv\" (UID: \"4cbd203f-2073-4bee-8234-da99cf46562b\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-q98cv" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.735838 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sfd7j\" (UniqueName: \"kubernetes.io/projected/b94f73b7-7c9a-4c88-9180-76861894189e-kube-api-access-sfd7j\") pod \"nmstate-webhook-8474b5b9d8-nwk6k\" (UID: \"b94f73b7-7c9a-4c88-9180-76861894189e\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-nwk6k" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.740238 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ksvb\" (UniqueName: \"kubernetes.io/projected/c3132ec2-5218-4b6f-8e19-dfce93103b19-kube-api-access-9ksvb\") pod \"nmstate-metrics-54757c584b-q9xb2\" (UID: \"c3132ec2-5218-4b6f-8e19-dfce93103b19\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-q9xb2" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.742081 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6mpr\" (UniqueName: \"kubernetes.io/projected/539fff34-8b11-42d9-b32f-4c1cab281cf5-kube-api-access-c6mpr\") pod \"nmstate-handler-ch2vl\" (UID: \"539fff34-8b11-42d9-b32f-4c1cab281cf5\") " pod="openshift-nmstate/nmstate-handler-ch2vl" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.755232 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzhhq\" (UniqueName: \"kubernetes.io/projected/4cbd203f-2073-4bee-8234-da99cf46562b-kube-api-access-mzhhq\") pod \"nmstate-console-plugin-7754f76f8b-q98cv\" (UID: \"4cbd203f-2073-4bee-8234-da99cf46562b\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-q98cv"
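The two MountVolume.SetUp failures above are ordering races rather than faults: the nmstate webhook and console-plugin pods were scheduled before their serving-cert secrets existed, so the operation executor parks each mount with "No retries permitted until ..." and retries after durationBeforeRetry, doubling the wait on repeated failures (500ms is the first interval here; the doubling factor and cap below are assumptions, not read from this log). A minimal sketch of that retry schedule:

package main

import (
	"fmt"
	"time"
)

// nextRetry doubles the wait after each failed mount attempt, starting
// at initial and clamped at ceiling (constants assumed).
func nextRetry(last, initial, ceiling time.Duration) time.Duration {
	if last == 0 {
		return initial
	}
	if d := last * 2; d < ceiling {
		return d
	}
	return ceiling
}

func main() {
	var d time.Duration
	for attempt := 1; attempt <= 5; attempt++ {
		d = nextRetry(d, 500*time.Millisecond, 2*time.Minute)
		fmt.Printf("attempt %d: no retries permitted for %s\n", attempt, d)
	}
}

Because each failure only reschedules the individual volume operation, the other volumes of the same pods mount successfully in the entries above, and the pods remain pending rather than failing outright.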
Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.783509 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-q9xb2" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.808097 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-595664cbc7-k6qgg"] Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.808718 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.826077 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-ch2vl" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.841114 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-595664cbc7-k6qgg"] Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.914347 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/736d0901-485c-4e90-9259-4a42d265916f-trusted-ca-bundle\") pod \"console-595664cbc7-k6qgg\" (UID: \"736d0901-485c-4e90-9259-4a42d265916f\") " pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.914401 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/736d0901-485c-4e90-9259-4a42d265916f-console-config\") pod \"console-595664cbc7-k6qgg\" (UID: \"736d0901-485c-4e90-9259-4a42d265916f\") " pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.914452 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/736d0901-485c-4e90-9259-4a42d265916f-console-oauth-config\") pod \"console-595664cbc7-k6qgg\" (UID: \"736d0901-485c-4e90-9259-4a42d265916f\") " pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.914784 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/736d0901-485c-4e90-9259-4a42d265916f-oauth-serving-cert\") pod \"console-595664cbc7-k6qgg\" (UID: \"736d0901-485c-4e90-9259-4a42d265916f\") " pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.915107 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/736d0901-485c-4e90-9259-4a42d265916f-service-ca\") pod \"console-595664cbc7-k6qgg\" (UID: \"736d0901-485c-4e90-9259-4a42d265916f\") " pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.915155 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ft5lk\" (UniqueName: \"kubernetes.io/projected/736d0901-485c-4e90-9259-4a42d265916f-kube-api-access-ft5lk\") pod \"console-595664cbc7-k6qgg\" (UID: \"736d0901-485c-4e90-9259-4a42d265916f\") " pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:27 crc kubenswrapper[4838]: I0202 11:05:27.915208 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/736d0901-485c-4e90-9259-4a42d265916f-console-serving-cert\") pod \"console-595664cbc7-k6qgg\" (UID: \"736d0901-485c-4e90-9259-4a42d265916f\") " pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.016079 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/736d0901-485c-4e90-9259-4a42d265916f-trusted-ca-bundle\") pod \"console-595664cbc7-k6qgg\" (UID: \"736d0901-485c-4e90-9259-4a42d265916f\") " pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.016138 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/736d0901-485c-4e90-9259-4a42d265916f-console-config\") pod \"console-595664cbc7-k6qgg\" (UID: \"736d0901-485c-4e90-9259-4a42d265916f\") " pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.016199 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/736d0901-485c-4e90-9259-4a42d265916f-console-oauth-config\") pod \"console-595664cbc7-k6qgg\" (UID: \"736d0901-485c-4e90-9259-4a42d265916f\") " pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.016246 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/736d0901-485c-4e90-9259-4a42d265916f-oauth-serving-cert\") pod \"console-595664cbc7-k6qgg\" (UID: \"736d0901-485c-4e90-9259-4a42d265916f\") " pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.016282 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/736d0901-485c-4e90-9259-4a42d265916f-service-ca\") pod \"console-595664cbc7-k6qgg\" (UID: \"736d0901-485c-4e90-9259-4a42d265916f\") " pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.016301 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ft5lk\" (UniqueName: \"kubernetes.io/projected/736d0901-485c-4e90-9259-4a42d265916f-kube-api-access-ft5lk\") pod \"console-595664cbc7-k6qgg\" (UID: \"736d0901-485c-4e90-9259-4a42d265916f\") " pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.016325 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/736d0901-485c-4e90-9259-4a42d265916f-console-serving-cert\") pod \"console-595664cbc7-k6qgg\" (UID: \"736d0901-485c-4e90-9259-4a42d265916f\") " pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.017220 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/736d0901-485c-4e90-9259-4a42d265916f-console-config\") pod \"console-595664cbc7-k6qgg\" (UID: \"736d0901-485c-4e90-9259-4a42d265916f\") " pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.017378 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" 
(UniqueName: \"kubernetes.io/configmap/736d0901-485c-4e90-9259-4a42d265916f-trusted-ca-bundle\") pod \"console-595664cbc7-k6qgg\" (UID: \"736d0901-485c-4e90-9259-4a42d265916f\") " pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.017608 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/736d0901-485c-4e90-9259-4a42d265916f-service-ca\") pod \"console-595664cbc7-k6qgg\" (UID: \"736d0901-485c-4e90-9259-4a42d265916f\") " pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.019532 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/736d0901-485c-4e90-9259-4a42d265916f-oauth-serving-cert\") pod \"console-595664cbc7-k6qgg\" (UID: \"736d0901-485c-4e90-9259-4a42d265916f\") " pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.021143 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/736d0901-485c-4e90-9259-4a42d265916f-console-serving-cert\") pod \"console-595664cbc7-k6qgg\" (UID: \"736d0901-485c-4e90-9259-4a42d265916f\") " pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.031769 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/736d0901-485c-4e90-9259-4a42d265916f-console-oauth-config\") pod \"console-595664cbc7-k6qgg\" (UID: \"736d0901-485c-4e90-9259-4a42d265916f\") " pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.033794 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ft5lk\" (UniqueName: \"kubernetes.io/projected/736d0901-485c-4e90-9259-4a42d265916f-kube-api-access-ft5lk\") pod \"console-595664cbc7-k6qgg\" (UID: \"736d0901-485c-4e90-9259-4a42d265916f\") " pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.166804 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.218459 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/4cbd203f-2073-4bee-8234-da99cf46562b-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-q98cv\" (UID: \"4cbd203f-2073-4bee-8234-da99cf46562b\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-q98cv" Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.218557 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/b94f73b7-7c9a-4c88-9180-76861894189e-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-nwk6k\" (UID: \"b94f73b7-7c9a-4c88-9180-76861894189e\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-nwk6k" Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.222163 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/b94f73b7-7c9a-4c88-9180-76861894189e-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-nwk6k\" (UID: \"b94f73b7-7c9a-4c88-9180-76861894189e\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-nwk6k" Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.222807 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/4cbd203f-2073-4bee-8234-da99cf46562b-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-q98cv\" (UID: \"4cbd203f-2073-4bee-8234-da99cf46562b\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-q98cv" Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.267660 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-q9xb2"] Feb 02 11:05:28 crc kubenswrapper[4838]: W0202 11:05:28.273944 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc3132ec2_5218_4b6f_8e19_dfce93103b19.slice/crio-4beb03fddd98c16fefe2321986d5e0a0955d313ffd65a4a7135d7198cc1e2299 WatchSource:0}: Error finding container 4beb03fddd98c16fefe2321986d5e0a0955d313ffd65a4a7135d7198cc1e2299: Status 404 returned error can't find the container with id 4beb03fddd98c16fefe2321986d5e0a0955d313ffd65a4a7135d7198cc1e2299 Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.353754 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-595664cbc7-k6qgg"] Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.404469 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-nwk6k" Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.425882 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-595664cbc7-k6qgg" event={"ID":"736d0901-485c-4e90-9259-4a42d265916f","Type":"ContainerStarted","Data":"234346a1d554a3729ad4a13469f2161a6494d1f271da67150684aecda3477b3e"} Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.427630 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-ch2vl" event={"ID":"539fff34-8b11-42d9-b32f-4c1cab281cf5","Type":"ContainerStarted","Data":"a4643db5b549cf11f42ff612c2497f99b050a222f7d46e714d0d8e0009fd5718"} Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.428307 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-q9xb2" event={"ID":"c3132ec2-5218-4b6f-8e19-dfce93103b19","Type":"ContainerStarted","Data":"4beb03fddd98c16fefe2321986d5e0a0955d313ffd65a4a7135d7198cc1e2299"} Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.521282 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-q98cv" Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.600107 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-nwk6k"] Feb 02 11:05:28 crc kubenswrapper[4838]: W0202 11:05:28.620694 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb94f73b7_7c9a_4c88_9180_76861894189e.slice/crio-6300244a1ca7f46b65ab5a49c981fe65f1a62d05eb6fca1bd98446d9968bc134 WatchSource:0}: Error finding container 6300244a1ca7f46b65ab5a49c981fe65f1a62d05eb6fca1bd98446d9968bc134: Status 404 returned error can't find the container with id 6300244a1ca7f46b65ab5a49c981fe65f1a62d05eb6fca1bd98446d9968bc134 Feb 02 11:05:28 crc kubenswrapper[4838]: I0202 11:05:28.742914 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-q98cv"] Feb 02 11:05:29 crc kubenswrapper[4838]: I0202 11:05:29.434075 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-595664cbc7-k6qgg" event={"ID":"736d0901-485c-4e90-9259-4a42d265916f","Type":"ContainerStarted","Data":"3248dde405b4e55206742aba38cf01a88dffda01ac260143614579df4d836210"} Feb 02 11:05:29 crc kubenswrapper[4838]: I0202 11:05:29.437869 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-q98cv" event={"ID":"4cbd203f-2073-4bee-8234-da99cf46562b","Type":"ContainerStarted","Data":"d79507bf55b6c6b03c627e2f7259fee99d46953cd89a4f0bffe89549579ca3ce"} Feb 02 11:05:29 crc kubenswrapper[4838]: I0202 11:05:29.439581 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-nwk6k" event={"ID":"b94f73b7-7c9a-4c88-9180-76861894189e","Type":"ContainerStarted","Data":"6300244a1ca7f46b65ab5a49c981fe65f1a62d05eb6fca1bd98446d9968bc134"} Feb 02 11:05:29 crc kubenswrapper[4838]: I0202 11:05:29.455900 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-595664cbc7-k6qgg" podStartSLOduration=2.45588648 podStartE2EDuration="2.45588648s" podCreationTimestamp="2026-02-02 11:05:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-02-02 11:05:29.453137737 +0000 UTC m=+723.790238775" watchObservedRunningTime="2026-02-02 11:05:29.45588648 +0000 UTC m=+723.792987508" Feb 02 11:05:31 crc kubenswrapper[4838]: I0202 11:05:31.462608 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-nwk6k" event={"ID":"b94f73b7-7c9a-4c88-9180-76861894189e","Type":"ContainerStarted","Data":"2ced5e90b89884a9b981101e116bffba31d193443cef2178c3ffe6868f328b2d"} Feb 02 11:05:31 crc kubenswrapper[4838]: I0202 11:05:31.463182 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-nwk6k" Feb 02 11:05:31 crc kubenswrapper[4838]: I0202 11:05:31.464065 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-ch2vl" event={"ID":"539fff34-8b11-42d9-b32f-4c1cab281cf5","Type":"ContainerStarted","Data":"50dfe44e8cf0d9af8e3e271b7273c6cb3e3f99759b8e8d3ccca5f73b78bb626f"} Feb 02 11:05:31 crc kubenswrapper[4838]: I0202 11:05:31.464124 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-ch2vl" Feb 02 11:05:31 crc kubenswrapper[4838]: I0202 11:05:31.466339 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-q9xb2" event={"ID":"c3132ec2-5218-4b6f-8e19-dfce93103b19","Type":"ContainerStarted","Data":"874ad29ef402c4855c1efdb6b2dadbda65663c73cee6fa6a67f728cea7034998"} Feb 02 11:05:31 crc kubenswrapper[4838]: I0202 11:05:31.496371 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-ch2vl" podStartSLOduration=2.077894533 podStartE2EDuration="4.496355761s" podCreationTimestamp="2026-02-02 11:05:27 +0000 UTC" firstStartedPulling="2026-02-02 11:05:27.868081425 +0000 UTC m=+722.205182453" lastFinishedPulling="2026-02-02 11:05:30.286542623 +0000 UTC m=+724.623643681" observedRunningTime="2026-02-02 11:05:31.49594772 +0000 UTC m=+725.833048758" watchObservedRunningTime="2026-02-02 11:05:31.496355761 +0000 UTC m=+725.833456779" Feb 02 11:05:31 crc kubenswrapper[4838]: I0202 11:05:31.497190 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-nwk6k" podStartSLOduration=2.831909143 podStartE2EDuration="4.497186313s" podCreationTimestamp="2026-02-02 11:05:27 +0000 UTC" firstStartedPulling="2026-02-02 11:05:28.624833837 +0000 UTC m=+722.961934855" lastFinishedPulling="2026-02-02 11:05:30.290110967 +0000 UTC m=+724.627212025" observedRunningTime="2026-02-02 11:05:31.477931742 +0000 UTC m=+725.815032770" watchObservedRunningTime="2026-02-02 11:05:31.497186313 +0000 UTC m=+725.834287351" Feb 02 11:05:32 crc kubenswrapper[4838]: I0202 11:05:32.473044 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-q98cv" event={"ID":"4cbd203f-2073-4bee-8234-da99cf46562b","Type":"ContainerStarted","Data":"9414dcec7163699ab879784e104fa04bc1ea58df3fd1bfb9dc9400e0b363e187"} Feb 02 11:05:32 crc kubenswrapper[4838]: I0202 11:05:32.493924 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-q98cv" podStartSLOduration=2.730033631 podStartE2EDuration="5.49390665s" podCreationTimestamp="2026-02-02 11:05:27 +0000 UTC" firstStartedPulling="2026-02-02 11:05:28.749951266 +0000 UTC m=+723.087052294" lastFinishedPulling="2026-02-02 11:05:31.513824285 +0000 UTC 
m=+725.850925313" observedRunningTime="2026-02-02 11:05:32.492698998 +0000 UTC m=+726.829800026" watchObservedRunningTime="2026-02-02 11:05:32.49390665 +0000 UTC m=+726.831007678" Feb 02 11:05:33 crc kubenswrapper[4838]: I0202 11:05:33.484997 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-q9xb2" event={"ID":"c3132ec2-5218-4b6f-8e19-dfce93103b19","Type":"ContainerStarted","Data":"77afbdfcbe19a2fa969b278fbf36864326da42a80100f0bcfa3c4b2d1033a058"} Feb 02 11:05:33 crc kubenswrapper[4838]: I0202 11:05:33.530070 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-54757c584b-q9xb2" podStartSLOduration=2.394242475 podStartE2EDuration="6.530037532s" podCreationTimestamp="2026-02-02 11:05:27 +0000 UTC" firstStartedPulling="2026-02-02 11:05:28.27618436 +0000 UTC m=+722.613285388" lastFinishedPulling="2026-02-02 11:05:32.411979377 +0000 UTC m=+726.749080445" observedRunningTime="2026-02-02 11:05:33.517406317 +0000 UTC m=+727.854507415" watchObservedRunningTime="2026-02-02 11:05:33.530037532 +0000 UTC m=+727.867138590" Feb 02 11:05:37 crc kubenswrapper[4838]: I0202 11:05:37.867255 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-ch2vl" Feb 02 11:05:38 crc kubenswrapper[4838]: I0202 11:05:38.167856 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:38 crc kubenswrapper[4838]: I0202 11:05:38.168514 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:38 crc kubenswrapper[4838]: I0202 11:05:38.177819 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:38 crc kubenswrapper[4838]: I0202 11:05:38.525213 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-595664cbc7-k6qgg" Feb 02 11:05:38 crc kubenswrapper[4838]: I0202 11:05:38.587944 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-2qsm5"] Feb 02 11:05:45 crc kubenswrapper[4838]: I0202 11:05:45.430404 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 11:05:45 crc kubenswrapper[4838]: I0202 11:05:45.432810 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 11:05:48 crc kubenswrapper[4838]: I0202 11:05:48.413224 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-nwk6k" Feb 02 11:06:01 crc kubenswrapper[4838]: I0202 11:06:01.248223 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g"] Feb 02 11:06:01 crc kubenswrapper[4838]: I0202 11:06:01.249933 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g" Feb 02 11:06:01 crc kubenswrapper[4838]: I0202 11:06:01.251854 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Feb 02 11:06:01 crc kubenswrapper[4838]: I0202 11:06:01.259975 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g"] Feb 02 11:06:01 crc kubenswrapper[4838]: I0202 11:06:01.402985 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5d5d76da-96c7-47aa-aeb7-e176ab3b89d3-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g\" (UID: \"5d5d76da-96c7-47aa-aeb7-e176ab3b89d3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g" Feb 02 11:06:01 crc kubenswrapper[4838]: I0202 11:06:01.403183 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5d5d76da-96c7-47aa-aeb7-e176ab3b89d3-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g\" (UID: \"5d5d76da-96c7-47aa-aeb7-e176ab3b89d3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g" Feb 02 11:06:01 crc kubenswrapper[4838]: I0202 11:06:01.403261 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkcjc\" (UniqueName: \"kubernetes.io/projected/5d5d76da-96c7-47aa-aeb7-e176ab3b89d3-kube-api-access-pkcjc\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g\" (UID: \"5d5d76da-96c7-47aa-aeb7-e176ab3b89d3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g" Feb 02 11:06:01 crc kubenswrapper[4838]: I0202 11:06:01.504956 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5d5d76da-96c7-47aa-aeb7-e176ab3b89d3-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g\" (UID: \"5d5d76da-96c7-47aa-aeb7-e176ab3b89d3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g" Feb 02 11:06:01 crc kubenswrapper[4838]: I0202 11:06:01.505453 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5d5d76da-96c7-47aa-aeb7-e176ab3b89d3-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g\" (UID: \"5d5d76da-96c7-47aa-aeb7-e176ab3b89d3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g" Feb 02 11:06:01 crc kubenswrapper[4838]: I0202 11:06:01.505862 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkcjc\" (UniqueName: \"kubernetes.io/projected/5d5d76da-96c7-47aa-aeb7-e176ab3b89d3-kube-api-access-pkcjc\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g\" (UID: \"5d5d76da-96c7-47aa-aeb7-e176ab3b89d3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g" Feb 02 11:06:01 crc kubenswrapper[4838]: I0202 11:06:01.505771 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/5d5d76da-96c7-47aa-aeb7-e176ab3b89d3-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g\" (UID: \"5d5d76da-96c7-47aa-aeb7-e176ab3b89d3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g" Feb 02 11:06:01 crc kubenswrapper[4838]: I0202 11:06:01.505943 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5d5d76da-96c7-47aa-aeb7-e176ab3b89d3-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g\" (UID: \"5d5d76da-96c7-47aa-aeb7-e176ab3b89d3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g" Feb 02 11:06:01 crc kubenswrapper[4838]: I0202 11:06:01.529596 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pkcjc\" (UniqueName: \"kubernetes.io/projected/5d5d76da-96c7-47aa-aeb7-e176ab3b89d3-kube-api-access-pkcjc\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g\" (UID: \"5d5d76da-96c7-47aa-aeb7-e176ab3b89d3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g" Feb 02 11:06:01 crc kubenswrapper[4838]: I0202 11:06:01.566226 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g" Feb 02 11:06:02 crc kubenswrapper[4838]: I0202 11:06:02.089284 4838 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Feb 02 11:06:02 crc kubenswrapper[4838]: I0202 11:06:02.186513 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g"] Feb 02 11:06:02 crc kubenswrapper[4838]: I0202 11:06:02.668281 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g" event={"ID":"5d5d76da-96c7-47aa-aeb7-e176ab3b89d3","Type":"ContainerStarted","Data":"1eee8d63f2669b2117317416b0202aa28cbeb442824179b6a879ff951e05f898"} Feb 02 11:06:03 crc kubenswrapper[4838]: I0202 11:06:03.604376 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2jz52"] Feb 02 11:06:03 crc kubenswrapper[4838]: I0202 11:06:03.606424 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2jz52" Feb 02 11:06:03 crc kubenswrapper[4838]: I0202 11:06:03.616248 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2jz52"] Feb 02 11:06:03 crc kubenswrapper[4838]: I0202 11:06:03.633907 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-2qsm5" podUID="3e879c3f-8c95-449b-b9e7-439c78f48209" containerName="console" containerID="cri-o://cb59937fbe88158c5201821494b1f0933c849f626ebd3341930491a07433790e" gracePeriod=15 Feb 02 11:06:03 crc kubenswrapper[4838]: I0202 11:06:03.669881 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/945078d8-08cb-4ce1-b866-26b0f93bd75c-catalog-content\") pod \"redhat-operators-2jz52\" (UID: \"945078d8-08cb-4ce1-b866-26b0f93bd75c\") " pod="openshift-marketplace/redhat-operators-2jz52" Feb 02 11:06:03 crc kubenswrapper[4838]: I0202 11:06:03.669938 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8p4k2\" (UniqueName: \"kubernetes.io/projected/945078d8-08cb-4ce1-b866-26b0f93bd75c-kube-api-access-8p4k2\") pod \"redhat-operators-2jz52\" (UID: \"945078d8-08cb-4ce1-b866-26b0f93bd75c\") " pod="openshift-marketplace/redhat-operators-2jz52" Feb 02 11:06:03 crc kubenswrapper[4838]: I0202 11:06:03.670017 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/945078d8-08cb-4ce1-b866-26b0f93bd75c-utilities\") pod \"redhat-operators-2jz52\" (UID: \"945078d8-08cb-4ce1-b866-26b0f93bd75c\") " pod="openshift-marketplace/redhat-operators-2jz52" Feb 02 11:06:03 crc kubenswrapper[4838]: I0202 11:06:03.674514 4838 generic.go:334] "Generic (PLEG): container finished" podID="5d5d76da-96c7-47aa-aeb7-e176ab3b89d3" containerID="fa11fb9d2638dfa1826612ffe1cde69ca617b93b606113abc8595a33f6fb27b2" exitCode=0 Feb 02 11:06:03 crc kubenswrapper[4838]: I0202 11:06:03.674561 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g" event={"ID":"5d5d76da-96c7-47aa-aeb7-e176ab3b89d3","Type":"ContainerDied","Data":"fa11fb9d2638dfa1826612ffe1cde69ca617b93b606113abc8595a33f6fb27b2"} Feb 02 11:06:03 crc kubenswrapper[4838]: I0202 11:06:03.771071 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/945078d8-08cb-4ce1-b866-26b0f93bd75c-catalog-content\") pod \"redhat-operators-2jz52\" (UID: \"945078d8-08cb-4ce1-b866-26b0f93bd75c\") " pod="openshift-marketplace/redhat-operators-2jz52" Feb 02 11:06:03 crc kubenswrapper[4838]: I0202 11:06:03.771118 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8p4k2\" (UniqueName: \"kubernetes.io/projected/945078d8-08cb-4ce1-b866-26b0f93bd75c-kube-api-access-8p4k2\") pod \"redhat-operators-2jz52\" (UID: \"945078d8-08cb-4ce1-b866-26b0f93bd75c\") " pod="openshift-marketplace/redhat-operators-2jz52" Feb 02 11:06:03 crc kubenswrapper[4838]: I0202 11:06:03.771173 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/945078d8-08cb-4ce1-b866-26b0f93bd75c-utilities\") pod \"redhat-operators-2jz52\" (UID: 
\"945078d8-08cb-4ce1-b866-26b0f93bd75c\") " pod="openshift-marketplace/redhat-operators-2jz52" Feb 02 11:06:03 crc kubenswrapper[4838]: I0202 11:06:03.771691 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/945078d8-08cb-4ce1-b866-26b0f93bd75c-catalog-content\") pod \"redhat-operators-2jz52\" (UID: \"945078d8-08cb-4ce1-b866-26b0f93bd75c\") " pod="openshift-marketplace/redhat-operators-2jz52" Feb 02 11:06:03 crc kubenswrapper[4838]: I0202 11:06:03.771695 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/945078d8-08cb-4ce1-b866-26b0f93bd75c-utilities\") pod \"redhat-operators-2jz52\" (UID: \"945078d8-08cb-4ce1-b866-26b0f93bd75c\") " pod="openshift-marketplace/redhat-operators-2jz52" Feb 02 11:06:03 crc kubenswrapper[4838]: I0202 11:06:03.793113 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8p4k2\" (UniqueName: \"kubernetes.io/projected/945078d8-08cb-4ce1-b866-26b0f93bd75c-kube-api-access-8p4k2\") pod \"redhat-operators-2jz52\" (UID: \"945078d8-08cb-4ce1-b866-26b0f93bd75c\") " pod="openshift-marketplace/redhat-operators-2jz52" Feb 02 11:06:03 crc kubenswrapper[4838]: I0202 11:06:03.965376 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2jz52" Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.023846 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-2qsm5_3e879c3f-8c95-449b-b9e7-439c78f48209/console/0.log" Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.023918 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.179715 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3e879c3f-8c95-449b-b9e7-439c78f48209-service-ca\") pod \"3e879c3f-8c95-449b-b9e7-439c78f48209\" (UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.179880 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3e879c3f-8c95-449b-b9e7-439c78f48209-console-config\") pod \"3e879c3f-8c95-449b-b9e7-439c78f48209\" (UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.179937 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v9d7w\" (UniqueName: \"kubernetes.io/projected/3e879c3f-8c95-449b-b9e7-439c78f48209-kube-api-access-v9d7w\") pod \"3e879c3f-8c95-449b-b9e7-439c78f48209\" (UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.180006 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3e879c3f-8c95-449b-b9e7-439c78f48209-console-oauth-config\") pod \"3e879c3f-8c95-449b-b9e7-439c78f48209\" (UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.180046 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3e879c3f-8c95-449b-b9e7-439c78f48209-trusted-ca-bundle\") pod 
\"3e879c3f-8c95-449b-b9e7-439c78f48209\" (UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.180085 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3e879c3f-8c95-449b-b9e7-439c78f48209-oauth-serving-cert\") pod \"3e879c3f-8c95-449b-b9e7-439c78f48209\" (UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.180728 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e879c3f-8c95-449b-b9e7-439c78f48209-console-config" (OuterVolumeSpecName: "console-config") pod "3e879c3f-8c95-449b-b9e7-439c78f48209" (UID: "3e879c3f-8c95-449b-b9e7-439c78f48209"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.180803 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e879c3f-8c95-449b-b9e7-439c78f48209-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "3e879c3f-8c95-449b-b9e7-439c78f48209" (UID: "3e879c3f-8c95-449b-b9e7-439c78f48209"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.180815 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e879c3f-8c95-449b-b9e7-439c78f48209-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "3e879c3f-8c95-449b-b9e7-439c78f48209" (UID: "3e879c3f-8c95-449b-b9e7-439c78f48209"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.181000 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3e879c3f-8c95-449b-b9e7-439c78f48209-console-serving-cert\") pod \"3e879c3f-8c95-449b-b9e7-439c78f48209\" (UID: \"3e879c3f-8c95-449b-b9e7-439c78f48209\") " Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.181041 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e879c3f-8c95-449b-b9e7-439c78f48209-service-ca" (OuterVolumeSpecName: "service-ca") pod "3e879c3f-8c95-449b-b9e7-439c78f48209" (UID: "3e879c3f-8c95-449b-b9e7-439c78f48209"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.181525 4838 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3e879c3f-8c95-449b-b9e7-439c78f48209-console-config\") on node \"crc\" DevicePath \"\"" Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.181552 4838 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3e879c3f-8c95-449b-b9e7-439c78f48209-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.181565 4838 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3e879c3f-8c95-449b-b9e7-439c78f48209-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.181579 4838 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3e879c3f-8c95-449b-b9e7-439c78f48209-service-ca\") on node \"crc\" DevicePath \"\"" Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.184783 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e879c3f-8c95-449b-b9e7-439c78f48209-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "3e879c3f-8c95-449b-b9e7-439c78f48209" (UID: "3e879c3f-8c95-449b-b9e7-439c78f48209"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.185122 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e879c3f-8c95-449b-b9e7-439c78f48209-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "3e879c3f-8c95-449b-b9e7-439c78f48209" (UID: "3e879c3f-8c95-449b-b9e7-439c78f48209"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.189214 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e879c3f-8c95-449b-b9e7-439c78f48209-kube-api-access-v9d7w" (OuterVolumeSpecName: "kube-api-access-v9d7w") pod "3e879c3f-8c95-449b-b9e7-439c78f48209" (UID: "3e879c3f-8c95-449b-b9e7-439c78f48209"). InnerVolumeSpecName "kube-api-access-v9d7w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.282710 4838 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3e879c3f-8c95-449b-b9e7-439c78f48209-console-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.282758 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v9d7w\" (UniqueName: \"kubernetes.io/projected/3e879c3f-8c95-449b-b9e7-439c78f48209-kube-api-access-v9d7w\") on node \"crc\" DevicePath \"\"" Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.282778 4838 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3e879c3f-8c95-449b-b9e7-439c78f48209-console-oauth-config\") on node \"crc\" DevicePath \"\"" Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.451248 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2jz52"] Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.684131 4838 generic.go:334] "Generic (PLEG): container finished" podID="945078d8-08cb-4ce1-b866-26b0f93bd75c" containerID="504277124ffff91f0d1a8f660f205c251961a1f33815de5b63587bf07889be60" exitCode=0 Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.684268 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2jz52" event={"ID":"945078d8-08cb-4ce1-b866-26b0f93bd75c","Type":"ContainerDied","Data":"504277124ffff91f0d1a8f660f205c251961a1f33815de5b63587bf07889be60"} Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.684576 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2jz52" event={"ID":"945078d8-08cb-4ce1-b866-26b0f93bd75c","Type":"ContainerStarted","Data":"b92a8f8ea43c5ab7ebfce9830b0e427434d03e6e9470c7009633827dc2760583"} Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.687324 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-2qsm5_3e879c3f-8c95-449b-b9e7-439c78f48209/console/0.log" Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.687379 4838 generic.go:334] "Generic (PLEG): container finished" podID="3e879c3f-8c95-449b-b9e7-439c78f48209" containerID="cb59937fbe88158c5201821494b1f0933c849f626ebd3341930491a07433790e" exitCode=2 Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.687422 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-2qsm5" event={"ID":"3e879c3f-8c95-449b-b9e7-439c78f48209","Type":"ContainerDied","Data":"cb59937fbe88158c5201821494b1f0933c849f626ebd3341930491a07433790e"} Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.687454 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-2qsm5" event={"ID":"3e879c3f-8c95-449b-b9e7-439c78f48209","Type":"ContainerDied","Data":"3cc858cf1908eeb91710e318977d330c13069db13cfad51f3b7cb2281ef66c37"} Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.687474 4838 scope.go:117] "RemoveContainer" containerID="cb59937fbe88158c5201821494b1f0933c849f626ebd3341930491a07433790e" Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.687588 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-2qsm5" Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.709498 4838 scope.go:117] "RemoveContainer" containerID="cb59937fbe88158c5201821494b1f0933c849f626ebd3341930491a07433790e" Feb 02 11:06:04 crc kubenswrapper[4838]: E0202 11:06:04.709929 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb59937fbe88158c5201821494b1f0933c849f626ebd3341930491a07433790e\": container with ID starting with cb59937fbe88158c5201821494b1f0933c849f626ebd3341930491a07433790e not found: ID does not exist" containerID="cb59937fbe88158c5201821494b1f0933c849f626ebd3341930491a07433790e" Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.709978 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb59937fbe88158c5201821494b1f0933c849f626ebd3341930491a07433790e"} err="failed to get container status \"cb59937fbe88158c5201821494b1f0933c849f626ebd3341930491a07433790e\": rpc error: code = NotFound desc = could not find container \"cb59937fbe88158c5201821494b1f0933c849f626ebd3341930491a07433790e\": container with ID starting with cb59937fbe88158c5201821494b1f0933c849f626ebd3341930491a07433790e not found: ID does not exist" Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.723541 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-2qsm5"] Feb 02 11:06:04 crc kubenswrapper[4838]: I0202 11:06:04.729270 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-2qsm5"] Feb 02 11:06:05 crc kubenswrapper[4838]: I0202 11:06:05.697954 4838 generic.go:334] "Generic (PLEG): container finished" podID="5d5d76da-96c7-47aa-aeb7-e176ab3b89d3" containerID="9041330dbca4813c2ce5655b8f14a16965ede0c3b1e990867ef90fb1662b23c9" exitCode=0 Feb 02 11:06:05 crc kubenswrapper[4838]: I0202 11:06:05.698043 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g" event={"ID":"5d5d76da-96c7-47aa-aeb7-e176ab3b89d3","Type":"ContainerDied","Data":"9041330dbca4813c2ce5655b8f14a16965ede0c3b1e990867ef90fb1662b23c9"} Feb 02 11:06:06 crc kubenswrapper[4838]: I0202 11:06:06.516192 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e879c3f-8c95-449b-b9e7-439c78f48209" path="/var/lib/kubelet/pods/3e879c3f-8c95-449b-b9e7-439c78f48209/volumes" Feb 02 11:06:06 crc kubenswrapper[4838]: I0202 11:06:06.717999 4838 generic.go:334] "Generic (PLEG): container finished" podID="5d5d76da-96c7-47aa-aeb7-e176ab3b89d3" containerID="655e5b5cbf0edf661f4f132a09455910ed33ad105241bb97148f250a79999275" exitCode=0 Feb 02 11:06:06 crc kubenswrapper[4838]: I0202 11:06:06.718056 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g" event={"ID":"5d5d76da-96c7-47aa-aeb7-e176ab3b89d3","Type":"ContainerDied","Data":"655e5b5cbf0edf661f4f132a09455910ed33ad105241bb97148f250a79999275"} Feb 02 11:06:07 crc kubenswrapper[4838]: I0202 11:06:07.739526 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2jz52" event={"ID":"945078d8-08cb-4ce1-b866-26b0f93bd75c","Type":"ContainerStarted","Data":"f1110eedb715c22d259e6b0973c9663e803696432fa667c9b6e37cccebf5a432"} Feb 02 11:06:08 crc kubenswrapper[4838]: I0202 11:06:08.009785 4838 util.go:48] "No ready sandbox for pod can 
be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g" Feb 02 11:06:08 crc kubenswrapper[4838]: I0202 11:06:08.142380 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5d5d76da-96c7-47aa-aeb7-e176ab3b89d3-bundle\") pod \"5d5d76da-96c7-47aa-aeb7-e176ab3b89d3\" (UID: \"5d5d76da-96c7-47aa-aeb7-e176ab3b89d3\") " Feb 02 11:06:08 crc kubenswrapper[4838]: I0202 11:06:08.142466 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5d5d76da-96c7-47aa-aeb7-e176ab3b89d3-util\") pod \"5d5d76da-96c7-47aa-aeb7-e176ab3b89d3\" (UID: \"5d5d76da-96c7-47aa-aeb7-e176ab3b89d3\") " Feb 02 11:06:08 crc kubenswrapper[4838]: I0202 11:06:08.142578 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pkcjc\" (UniqueName: \"kubernetes.io/projected/5d5d76da-96c7-47aa-aeb7-e176ab3b89d3-kube-api-access-pkcjc\") pod \"5d5d76da-96c7-47aa-aeb7-e176ab3b89d3\" (UID: \"5d5d76da-96c7-47aa-aeb7-e176ab3b89d3\") " Feb 02 11:06:08 crc kubenswrapper[4838]: I0202 11:06:08.143795 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d5d76da-96c7-47aa-aeb7-e176ab3b89d3-bundle" (OuterVolumeSpecName: "bundle") pod "5d5d76da-96c7-47aa-aeb7-e176ab3b89d3" (UID: "5d5d76da-96c7-47aa-aeb7-e176ab3b89d3"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:06:08 crc kubenswrapper[4838]: I0202 11:06:08.151746 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d5d76da-96c7-47aa-aeb7-e176ab3b89d3-kube-api-access-pkcjc" (OuterVolumeSpecName: "kube-api-access-pkcjc") pod "5d5d76da-96c7-47aa-aeb7-e176ab3b89d3" (UID: "5d5d76da-96c7-47aa-aeb7-e176ab3b89d3"). InnerVolumeSpecName "kube-api-access-pkcjc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:06:08 crc kubenswrapper[4838]: I0202 11:06:08.172150 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d5d76da-96c7-47aa-aeb7-e176ab3b89d3-util" (OuterVolumeSpecName: "util") pod "5d5d76da-96c7-47aa-aeb7-e176ab3b89d3" (UID: "5d5d76da-96c7-47aa-aeb7-e176ab3b89d3"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:06:08 crc kubenswrapper[4838]: I0202 11:06:08.244854 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pkcjc\" (UniqueName: \"kubernetes.io/projected/5d5d76da-96c7-47aa-aeb7-e176ab3b89d3-kube-api-access-pkcjc\") on node \"crc\" DevicePath \"\"" Feb 02 11:06:08 crc kubenswrapper[4838]: I0202 11:06:08.244920 4838 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5d5d76da-96c7-47aa-aeb7-e176ab3b89d3-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:06:08 crc kubenswrapper[4838]: I0202 11:06:08.244943 4838 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5d5d76da-96c7-47aa-aeb7-e176ab3b89d3-util\") on node \"crc\" DevicePath \"\"" Feb 02 11:06:08 crc kubenswrapper[4838]: I0202 11:06:08.754334 4838 generic.go:334] "Generic (PLEG): container finished" podID="945078d8-08cb-4ce1-b866-26b0f93bd75c" containerID="f1110eedb715c22d259e6b0973c9663e803696432fa667c9b6e37cccebf5a432" exitCode=0 Feb 02 11:06:08 crc kubenswrapper[4838]: I0202 11:06:08.754449 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2jz52" event={"ID":"945078d8-08cb-4ce1-b866-26b0f93bd75c","Type":"ContainerDied","Data":"f1110eedb715c22d259e6b0973c9663e803696432fa667c9b6e37cccebf5a432"} Feb 02 11:06:08 crc kubenswrapper[4838]: I0202 11:06:08.761070 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g" event={"ID":"5d5d76da-96c7-47aa-aeb7-e176ab3b89d3","Type":"ContainerDied","Data":"1eee8d63f2669b2117317416b0202aa28cbeb442824179b6a879ff951e05f898"} Feb 02 11:06:08 crc kubenswrapper[4838]: I0202 11:06:08.761119 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1eee8d63f2669b2117317416b0202aa28cbeb442824179b6a879ff951e05f898" Feb 02 11:06:08 crc kubenswrapper[4838]: I0202 11:06:08.761172 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g" Feb 02 11:06:09 crc kubenswrapper[4838]: I0202 11:06:09.769395 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2jz52" event={"ID":"945078d8-08cb-4ce1-b866-26b0f93bd75c","Type":"ContainerStarted","Data":"8e3df5a34dfccf971ab693da0985a17fe4c337c2161bf348ceaad43249d3f05d"} Feb 02 11:06:09 crc kubenswrapper[4838]: I0202 11:06:09.789296 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2jz52" podStartSLOduration=2.3251469350000002 podStartE2EDuration="6.789279131s" podCreationTimestamp="2026-02-02 11:06:03 +0000 UTC" firstStartedPulling="2026-02-02 11:06:04.688455058 +0000 UTC m=+759.025556096" lastFinishedPulling="2026-02-02 11:06:09.152587224 +0000 UTC m=+763.489688292" observedRunningTime="2026-02-02 11:06:09.787429512 +0000 UTC m=+764.124530560" watchObservedRunningTime="2026-02-02 11:06:09.789279131 +0000 UTC m=+764.126380159" Feb 02 11:06:13 crc kubenswrapper[4838]: I0202 11:06:13.966006 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2jz52" Feb 02 11:06:13 crc kubenswrapper[4838]: I0202 11:06:13.966305 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2jz52" Feb 02 11:06:15 crc kubenswrapper[4838]: I0202 11:06:15.035242 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-2jz52" podUID="945078d8-08cb-4ce1-b866-26b0f93bd75c" containerName="registry-server" probeResult="failure" output=< Feb 02 11:06:15 crc kubenswrapper[4838]: timeout: failed to connect service ":50051" within 1s Feb 02 11:06:15 crc kubenswrapper[4838]: > Feb 02 11:06:15 crc kubenswrapper[4838]: I0202 11:06:15.429559 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 11:06:15 crc kubenswrapper[4838]: I0202 11:06:15.429647 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 11:06:16 crc kubenswrapper[4838]: I0202 11:06:16.664333 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-67dbdd759f-klfvs"] Feb 02 11:06:16 crc kubenswrapper[4838]: E0202 11:06:16.664604 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d5d76da-96c7-47aa-aeb7-e176ab3b89d3" containerName="pull" Feb 02 11:06:16 crc kubenswrapper[4838]: I0202 11:06:16.664634 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d5d76da-96c7-47aa-aeb7-e176ab3b89d3" containerName="pull" Feb 02 11:06:16 crc kubenswrapper[4838]: E0202 11:06:16.664646 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d5d76da-96c7-47aa-aeb7-e176ab3b89d3" containerName="extract" Feb 02 11:06:16 crc kubenswrapper[4838]: I0202 11:06:16.664652 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d5d76da-96c7-47aa-aeb7-e176ab3b89d3" containerName="extract" 
Feb 02 11:06:16 crc kubenswrapper[4838]: E0202 11:06:16.664668 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d5d76da-96c7-47aa-aeb7-e176ab3b89d3" containerName="util"
Feb 02 11:06:16 crc kubenswrapper[4838]: I0202 11:06:16.664674 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d5d76da-96c7-47aa-aeb7-e176ab3b89d3" containerName="util"
Feb 02 11:06:16 crc kubenswrapper[4838]: E0202 11:06:16.664683 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e879c3f-8c95-449b-b9e7-439c78f48209" containerName="console"
Feb 02 11:06:16 crc kubenswrapper[4838]: I0202 11:06:16.664689 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e879c3f-8c95-449b-b9e7-439c78f48209" containerName="console"
Feb 02 11:06:16 crc kubenswrapper[4838]: I0202 11:06:16.664779 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e879c3f-8c95-449b-b9e7-439c78f48209" containerName="console"
Feb 02 11:06:16 crc kubenswrapper[4838]: I0202 11:06:16.664795 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d5d76da-96c7-47aa-aeb7-e176ab3b89d3" containerName="extract"
Feb 02 11:06:16 crc kubenswrapper[4838]: I0202 11:06:16.665151 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-67dbdd759f-klfvs"
Feb 02 11:06:16 crc kubenswrapper[4838]: I0202 11:06:16.668165 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt"
Feb 02 11:06:16 crc kubenswrapper[4838]: I0202 11:06:16.668400 4838 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert"
Feb 02 11:06:16 crc kubenswrapper[4838]: I0202 11:06:16.668565 4838 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-25zz9"
Feb 02 11:06:16 crc kubenswrapper[4838]: I0202 11:06:16.668718 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt"
Feb 02 11:06:16 crc kubenswrapper[4838]: I0202 11:06:16.675332 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-67dbdd759f-klfvs"]
Feb 02 11:06:16 crc kubenswrapper[4838]: I0202 11:06:16.675488 4838 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert"
Feb 02 11:06:16 crc kubenswrapper[4838]: I0202 11:06:16.754828 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/734aaa76-0e63-4bf4-9b2d-60a0346dfcac-webhook-cert\") pod \"metallb-operator-controller-manager-67dbdd759f-klfvs\" (UID: \"734aaa76-0e63-4bf4-9b2d-60a0346dfcac\") " pod="metallb-system/metallb-operator-controller-manager-67dbdd759f-klfvs"
Feb 02 11:06:16 crc kubenswrapper[4838]: I0202 11:06:16.777535 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/734aaa76-0e63-4bf4-9b2d-60a0346dfcac-apiservice-cert\") pod \"metallb-operator-controller-manager-67dbdd759f-klfvs\" (UID: \"734aaa76-0e63-4bf4-9b2d-60a0346dfcac\") " pod="metallb-system/metallb-operator-controller-manager-67dbdd759f-klfvs"
Feb 02 11:06:16 crc kubenswrapper[4838]: I0202 11:06:16.777649 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bn45s\" (UniqueName: \"kubernetes.io/projected/734aaa76-0e63-4bf4-9b2d-60a0346dfcac-kube-api-access-bn45s\") pod \"metallb-operator-controller-manager-67dbdd759f-klfvs\" (UID: \"734aaa76-0e63-4bf4-9b2d-60a0346dfcac\") " pod="metallb-system/metallb-operator-controller-manager-67dbdd759f-klfvs"
Feb 02 11:06:16 crc kubenswrapper[4838]: I0202 11:06:16.886602 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/734aaa76-0e63-4bf4-9b2d-60a0346dfcac-webhook-cert\") pod \"metallb-operator-controller-manager-67dbdd759f-klfvs\" (UID: \"734aaa76-0e63-4bf4-9b2d-60a0346dfcac\") " pod="metallb-system/metallb-operator-controller-manager-67dbdd759f-klfvs"
Feb 02 11:06:16 crc kubenswrapper[4838]: I0202 11:06:16.886897 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/734aaa76-0e63-4bf4-9b2d-60a0346dfcac-apiservice-cert\") pod \"metallb-operator-controller-manager-67dbdd759f-klfvs\" (UID: \"734aaa76-0e63-4bf4-9b2d-60a0346dfcac\") " pod="metallb-system/metallb-operator-controller-manager-67dbdd759f-klfvs"
Feb 02 11:06:16 crc kubenswrapper[4838]: I0202 11:06:16.886982 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bn45s\" (UniqueName: \"kubernetes.io/projected/734aaa76-0e63-4bf4-9b2d-60a0346dfcac-kube-api-access-bn45s\") pod \"metallb-operator-controller-manager-67dbdd759f-klfvs\" (UID: \"734aaa76-0e63-4bf4-9b2d-60a0346dfcac\") " pod="metallb-system/metallb-operator-controller-manager-67dbdd759f-klfvs"
Feb 02 11:06:16 crc kubenswrapper[4838]: I0202 11:06:16.896454 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/734aaa76-0e63-4bf4-9b2d-60a0346dfcac-apiservice-cert\") pod \"metallb-operator-controller-manager-67dbdd759f-klfvs\" (UID: \"734aaa76-0e63-4bf4-9b2d-60a0346dfcac\") " pod="metallb-system/metallb-operator-controller-manager-67dbdd759f-klfvs"
Feb 02 11:06:16 crc kubenswrapper[4838]: I0202 11:06:16.899218 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/734aaa76-0e63-4bf4-9b2d-60a0346dfcac-webhook-cert\") pod \"metallb-operator-controller-manager-67dbdd759f-klfvs\" (UID: \"734aaa76-0e63-4bf4-9b2d-60a0346dfcac\") " pod="metallb-system/metallb-operator-controller-manager-67dbdd759f-klfvs"
Feb 02 11:06:16 crc kubenswrapper[4838]: I0202 11:06:16.958881 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bn45s\" (UniqueName: \"kubernetes.io/projected/734aaa76-0e63-4bf4-9b2d-60a0346dfcac-kube-api-access-bn45s\") pod \"metallb-operator-controller-manager-67dbdd759f-klfvs\" (UID: \"734aaa76-0e63-4bf4-9b2d-60a0346dfcac\") " pod="metallb-system/metallb-operator-controller-manager-67dbdd759f-klfvs"
Feb 02 11:06:16 crc kubenswrapper[4838]: I0202 11:06:16.991731 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-67dbdd759f-klfvs"
Feb 02 11:06:17 crc kubenswrapper[4838]: I0202 11:06:17.092171 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-b56888666-82h5d"]
Feb 02 11:06:17 crc kubenswrapper[4838]: I0202 11:06:17.092868 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-b56888666-82h5d"
Feb 02 11:06:17 crc kubenswrapper[4838]: I0202 11:06:17.115374 4838 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-8swtf"
Feb 02 11:06:17 crc kubenswrapper[4838]: I0202 11:06:17.115560 4838 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Feb 02 11:06:17 crc kubenswrapper[4838]: I0202 11:06:17.115683 4838 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert"
Feb 02 11:06:17 crc kubenswrapper[4838]: I0202 11:06:17.120818 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-b56888666-82h5d"]
Feb 02 11:06:17 crc kubenswrapper[4838]: I0202 11:06:17.190576 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlbm4\" (UniqueName: \"kubernetes.io/projected/ffd0594c-3abc-4b1a-89e4-0face9bad35f-kube-api-access-jlbm4\") pod \"metallb-operator-webhook-server-b56888666-82h5d\" (UID: \"ffd0594c-3abc-4b1a-89e4-0face9bad35f\") " pod="metallb-system/metallb-operator-webhook-server-b56888666-82h5d"
Feb 02 11:06:17 crc kubenswrapper[4838]: I0202 11:06:17.190770 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ffd0594c-3abc-4b1a-89e4-0face9bad35f-apiservice-cert\") pod \"metallb-operator-webhook-server-b56888666-82h5d\" (UID: \"ffd0594c-3abc-4b1a-89e4-0face9bad35f\") " pod="metallb-system/metallb-operator-webhook-server-b56888666-82h5d"
Feb 02 11:06:17 crc kubenswrapper[4838]: I0202 11:06:17.190801 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ffd0594c-3abc-4b1a-89e4-0face9bad35f-webhook-cert\") pod \"metallb-operator-webhook-server-b56888666-82h5d\" (UID: \"ffd0594c-3abc-4b1a-89e4-0face9bad35f\") " pod="metallb-system/metallb-operator-webhook-server-b56888666-82h5d"
Feb 02 11:06:17 crc kubenswrapper[4838]: I0202 11:06:17.292052 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlbm4\" (UniqueName: \"kubernetes.io/projected/ffd0594c-3abc-4b1a-89e4-0face9bad35f-kube-api-access-jlbm4\") pod \"metallb-operator-webhook-server-b56888666-82h5d\" (UID: \"ffd0594c-3abc-4b1a-89e4-0face9bad35f\") " pod="metallb-system/metallb-operator-webhook-server-b56888666-82h5d"
Feb 02 11:06:17 crc kubenswrapper[4838]: I0202 11:06:17.292129 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ffd0594c-3abc-4b1a-89e4-0face9bad35f-apiservice-cert\") pod \"metallb-operator-webhook-server-b56888666-82h5d\" (UID: \"ffd0594c-3abc-4b1a-89e4-0face9bad35f\") " pod="metallb-system/metallb-operator-webhook-server-b56888666-82h5d"
Feb 02 11:06:17 crc kubenswrapper[4838]: I0202 11:06:17.292154 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ffd0594c-3abc-4b1a-89e4-0face9bad35f-webhook-cert\") pod \"metallb-operator-webhook-server-b56888666-82h5d\" (UID: \"ffd0594c-3abc-4b1a-89e4-0face9bad35f\") " pod="metallb-system/metallb-operator-webhook-server-b56888666-82h5d"
Feb 02 11:06:17 crc kubenswrapper[4838]: I0202 11:06:17.296310 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ffd0594c-3abc-4b1a-89e4-0face9bad35f-apiservice-cert\") pod \"metallb-operator-webhook-server-b56888666-82h5d\" (UID: \"ffd0594c-3abc-4b1a-89e4-0face9bad35f\") " pod="metallb-system/metallb-operator-webhook-server-b56888666-82h5d"
Feb 02 11:06:17 crc kubenswrapper[4838]: I0202 11:06:17.296350 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ffd0594c-3abc-4b1a-89e4-0face9bad35f-webhook-cert\") pod \"metallb-operator-webhook-server-b56888666-82h5d\" (UID: \"ffd0594c-3abc-4b1a-89e4-0face9bad35f\") " pod="metallb-system/metallb-operator-webhook-server-b56888666-82h5d"
Feb 02 11:06:17 crc kubenswrapper[4838]: I0202 11:06:17.310931 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlbm4\" (UniqueName: \"kubernetes.io/projected/ffd0594c-3abc-4b1a-89e4-0face9bad35f-kube-api-access-jlbm4\") pod \"metallb-operator-webhook-server-b56888666-82h5d\" (UID: \"ffd0594c-3abc-4b1a-89e4-0face9bad35f\") " pod="metallb-system/metallb-operator-webhook-server-b56888666-82h5d"
Feb 02 11:06:17 crc kubenswrapper[4838]: I0202 11:06:17.435376 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-b56888666-82h5d"
Feb 02 11:06:17 crc kubenswrapper[4838]: I0202 11:06:17.484860 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-67dbdd759f-klfvs"]
Feb 02 11:06:17 crc kubenswrapper[4838]: W0202 11:06:17.490242 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod734aaa76_0e63_4bf4_9b2d_60a0346dfcac.slice/crio-b8b513c67f71d578206a117399424c93dbb1d9deaf258726ddccd6dbc532618c WatchSource:0}: Error finding container b8b513c67f71d578206a117399424c93dbb1d9deaf258726ddccd6dbc532618c: Status 404 returned error can't find the container with id b8b513c67f71d578206a117399424c93dbb1d9deaf258726ddccd6dbc532618c
Feb 02 11:06:17 crc kubenswrapper[4838]: I0202 11:06:17.658559 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-b56888666-82h5d"]
Feb 02 11:06:17 crc kubenswrapper[4838]: W0202 11:06:17.669241 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podffd0594c_3abc_4b1a_89e4_0face9bad35f.slice/crio-ba7f0909e0e9c245bafcb99d5bffb1876c1d7eca9f9a6f8466a1169a88b83410 WatchSource:0}: Error finding container ba7f0909e0e9c245bafcb99d5bffb1876c1d7eca9f9a6f8466a1169a88b83410: Status 404 returned error can't find the container with id ba7f0909e0e9c245bafcb99d5bffb1876c1d7eca9f9a6f8466a1169a88b83410
Feb 02 11:06:17 crc kubenswrapper[4838]: I0202 11:06:17.830405 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-b56888666-82h5d" event={"ID":"ffd0594c-3abc-4b1a-89e4-0face9bad35f","Type":"ContainerStarted","Data":"ba7f0909e0e9c245bafcb99d5bffb1876c1d7eca9f9a6f8466a1169a88b83410"}
Feb 02 11:06:17 crc kubenswrapper[4838]: I0202 11:06:17.832802 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-67dbdd759f-klfvs" event={"ID":"734aaa76-0e63-4bf4-9b2d-60a0346dfcac","Type":"ContainerStarted","Data":"b8b513c67f71d578206a117399424c93dbb1d9deaf258726ddccd6dbc532618c"}
Feb 02 11:06:23 crc kubenswrapper[4838]: I0202 11:06:23.868274 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-b56888666-82h5d" event={"ID":"ffd0594c-3abc-4b1a-89e4-0face9bad35f","Type":"ContainerStarted","Data":"bf0aa31a01e45d9c2f9aa7e64d3118fda0e7771ab57f0319b57eac86ab960248"}
Feb 02 11:06:23 crc kubenswrapper[4838]: I0202 11:06:23.869044 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-b56888666-82h5d"
Feb 02 11:06:23 crc kubenswrapper[4838]: I0202 11:06:23.871855 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-67dbdd759f-klfvs" event={"ID":"734aaa76-0e63-4bf4-9b2d-60a0346dfcac","Type":"ContainerStarted","Data":"76a74bd01bbe618da39ce9f15d50f04ccca94d84939ea6dd675a5e4edaa71a01"}
Feb 02 11:06:23 crc kubenswrapper[4838]: I0202 11:06:23.872011 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-67dbdd759f-klfvs"
Feb 02 11:06:23 crc kubenswrapper[4838]: I0202 11:06:23.894147 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-b56888666-82h5d" podStartSLOduration=1.846451292 podStartE2EDuration="6.894129065s" podCreationTimestamp="2026-02-02 11:06:17 +0000 UTC" firstStartedPulling="2026-02-02 11:06:17.672298007 +0000 UTC m=+772.009399035" lastFinishedPulling="2026-02-02 11:06:22.71997578 +0000 UTC m=+777.057076808" observedRunningTime="2026-02-02 11:06:23.891175138 +0000 UTC m=+778.228276196" watchObservedRunningTime="2026-02-02 11:06:23.894129065 +0000 UTC m=+778.231230103"
Feb 02 11:06:23 crc kubenswrapper[4838]: I0202 11:06:23.922876 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-67dbdd759f-klfvs" podStartSLOduration=2.719610971 podStartE2EDuration="7.92285487s" podCreationTimestamp="2026-02-02 11:06:16 +0000 UTC" firstStartedPulling="2026-02-02 11:06:17.497126021 +0000 UTC m=+771.834227049" lastFinishedPulling="2026-02-02 11:06:22.70036992 +0000 UTC m=+777.037470948" observedRunningTime="2026-02-02 11:06:23.917875569 +0000 UTC m=+778.254976617" watchObservedRunningTime="2026-02-02 11:06:23.92285487 +0000 UTC m=+778.259955918"
Feb 02 11:06:24 crc kubenswrapper[4838]: I0202 11:06:24.007071 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2jz52"
Feb 02 11:06:24 crc kubenswrapper[4838]: I0202 11:06:24.047222 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2jz52"
Feb 02 11:06:25 crc kubenswrapper[4838]: I0202 11:06:25.190488 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2jz52"]
Feb 02 11:06:25 crc kubenswrapper[4838]: I0202 11:06:25.881725 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2jz52" podUID="945078d8-08cb-4ce1-b866-26b0f93bd75c" containerName="registry-server" containerID="cri-o://8e3df5a34dfccf971ab693da0985a17fe4c337c2161bf348ceaad43249d3f05d" gracePeriod=2
Feb 02 11:06:26 crc kubenswrapper[4838]: I0202 11:06:26.275188 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2jz52"
Feb 02 11:06:26 crc kubenswrapper[4838]: I0202 11:06:26.337444 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/945078d8-08cb-4ce1-b866-26b0f93bd75c-catalog-content\") pod \"945078d8-08cb-4ce1-b866-26b0f93bd75c\" (UID: \"945078d8-08cb-4ce1-b866-26b0f93bd75c\") "
Feb 02 11:06:26 crc kubenswrapper[4838]: I0202 11:06:26.337494 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8p4k2\" (UniqueName: \"kubernetes.io/projected/945078d8-08cb-4ce1-b866-26b0f93bd75c-kube-api-access-8p4k2\") pod \"945078d8-08cb-4ce1-b866-26b0f93bd75c\" (UID: \"945078d8-08cb-4ce1-b866-26b0f93bd75c\") "
Feb 02 11:06:26 crc kubenswrapper[4838]: I0202 11:06:26.337582 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/945078d8-08cb-4ce1-b866-26b0f93bd75c-utilities\") pod \"945078d8-08cb-4ce1-b866-26b0f93bd75c\" (UID: \"945078d8-08cb-4ce1-b866-26b0f93bd75c\") "
Feb 02 11:06:26 crc kubenswrapper[4838]: I0202 11:06:26.338512 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/945078d8-08cb-4ce1-b866-26b0f93bd75c-utilities" (OuterVolumeSpecName: "utilities") pod "945078d8-08cb-4ce1-b866-26b0f93bd75c" (UID: "945078d8-08cb-4ce1-b866-26b0f93bd75c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:06:26 crc kubenswrapper[4838]: I0202 11:06:26.343018 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/945078d8-08cb-4ce1-b866-26b0f93bd75c-kube-api-access-8p4k2" (OuterVolumeSpecName: "kube-api-access-8p4k2") pod "945078d8-08cb-4ce1-b866-26b0f93bd75c" (UID: "945078d8-08cb-4ce1-b866-26b0f93bd75c"). InnerVolumeSpecName "kube-api-access-8p4k2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:06:26 crc kubenswrapper[4838]: I0202 11:06:26.438456 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8p4k2\" (UniqueName: \"kubernetes.io/projected/945078d8-08cb-4ce1-b866-26b0f93bd75c-kube-api-access-8p4k2\") on node \"crc\" DevicePath \"\""
Feb 02 11:06:26 crc kubenswrapper[4838]: I0202 11:06:26.438488 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/945078d8-08cb-4ce1-b866-26b0f93bd75c-utilities\") on node \"crc\" DevicePath \"\""
Feb 02 11:06:26 crc kubenswrapper[4838]: I0202 11:06:26.467861 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/945078d8-08cb-4ce1-b866-26b0f93bd75c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "945078d8-08cb-4ce1-b866-26b0f93bd75c" (UID: "945078d8-08cb-4ce1-b866-26b0f93bd75c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:06:26 crc kubenswrapper[4838]: I0202 11:06:26.539143 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/945078d8-08cb-4ce1-b866-26b0f93bd75c-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 02 11:06:26 crc kubenswrapper[4838]: I0202 11:06:26.889261 4838 generic.go:334] "Generic (PLEG): container finished" podID="945078d8-08cb-4ce1-b866-26b0f93bd75c" containerID="8e3df5a34dfccf971ab693da0985a17fe4c337c2161bf348ceaad43249d3f05d" exitCode=0
Feb 02 11:06:26 crc kubenswrapper[4838]: I0202 11:06:26.889309 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2jz52" event={"ID":"945078d8-08cb-4ce1-b866-26b0f93bd75c","Type":"ContainerDied","Data":"8e3df5a34dfccf971ab693da0985a17fe4c337c2161bf348ceaad43249d3f05d"}
Feb 02 11:06:26 crc kubenswrapper[4838]: I0202 11:06:26.889338 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2jz52"
Feb 02 11:06:26 crc kubenswrapper[4838]: I0202 11:06:26.889363 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2jz52" event={"ID":"945078d8-08cb-4ce1-b866-26b0f93bd75c","Type":"ContainerDied","Data":"b92a8f8ea43c5ab7ebfce9830b0e427434d03e6e9470c7009633827dc2760583"}
Feb 02 11:06:26 crc kubenswrapper[4838]: I0202 11:06:26.889387 4838 scope.go:117] "RemoveContainer" containerID="8e3df5a34dfccf971ab693da0985a17fe4c337c2161bf348ceaad43249d3f05d"
Feb 02 11:06:26 crc kubenswrapper[4838]: I0202 11:06:26.904705 4838 scope.go:117] "RemoveContainer" containerID="f1110eedb715c22d259e6b0973c9663e803696432fa667c9b6e37cccebf5a432"
Feb 02 11:06:26 crc kubenswrapper[4838]: I0202 11:06:26.911726 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2jz52"]
Feb 02 11:06:26 crc kubenswrapper[4838]: I0202 11:06:26.915836 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2jz52"]
Feb 02 11:06:26 crc kubenswrapper[4838]: I0202 11:06:26.921439 4838 scope.go:117] "RemoveContainer" containerID="504277124ffff91f0d1a8f660f205c251961a1f33815de5b63587bf07889be60"
Feb 02 11:06:26 crc kubenswrapper[4838]: I0202 11:06:26.964907 4838 scope.go:117] "RemoveContainer" containerID="8e3df5a34dfccf971ab693da0985a17fe4c337c2161bf348ceaad43249d3f05d"
Feb 02 11:06:26 crc kubenswrapper[4838]: E0202 11:06:26.965230 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e3df5a34dfccf971ab693da0985a17fe4c337c2161bf348ceaad43249d3f05d\": container with ID starting with 8e3df5a34dfccf971ab693da0985a17fe4c337c2161bf348ceaad43249d3f05d not found: ID does not exist" containerID="8e3df5a34dfccf971ab693da0985a17fe4c337c2161bf348ceaad43249d3f05d"
Feb 02 11:06:26 crc kubenswrapper[4838]: I0202 11:06:26.965276 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e3df5a34dfccf971ab693da0985a17fe4c337c2161bf348ceaad43249d3f05d"} err="failed to get container status \"8e3df5a34dfccf971ab693da0985a17fe4c337c2161bf348ceaad43249d3f05d\": rpc error: code = NotFound desc = could not find container \"8e3df5a34dfccf971ab693da0985a17fe4c337c2161bf348ceaad43249d3f05d\": container with ID starting with 8e3df5a34dfccf971ab693da0985a17fe4c337c2161bf348ceaad43249d3f05d not found: ID does not exist"
Feb 02 11:06:26 crc kubenswrapper[4838]: I0202 11:06:26.965298 4838 scope.go:117] "RemoveContainer" containerID="f1110eedb715c22d259e6b0973c9663e803696432fa667c9b6e37cccebf5a432"
Feb 02 11:06:26 crc kubenswrapper[4838]: E0202 11:06:26.965675 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1110eedb715c22d259e6b0973c9663e803696432fa667c9b6e37cccebf5a432\": container with ID starting with f1110eedb715c22d259e6b0973c9663e803696432fa667c9b6e37cccebf5a432 not found: ID does not exist" containerID="f1110eedb715c22d259e6b0973c9663e803696432fa667c9b6e37cccebf5a432"
Feb 02 11:06:26 crc kubenswrapper[4838]: I0202 11:06:26.965701 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1110eedb715c22d259e6b0973c9663e803696432fa667c9b6e37cccebf5a432"} err="failed to get container status \"f1110eedb715c22d259e6b0973c9663e803696432fa667c9b6e37cccebf5a432\": rpc error: code = NotFound desc = could not find container \"f1110eedb715c22d259e6b0973c9663e803696432fa667c9b6e37cccebf5a432\": container with ID starting with f1110eedb715c22d259e6b0973c9663e803696432fa667c9b6e37cccebf5a432 not found: ID does not exist"
Feb 02 11:06:26 crc kubenswrapper[4838]: I0202 11:06:26.965719 4838 scope.go:117] "RemoveContainer" containerID="504277124ffff91f0d1a8f660f205c251961a1f33815de5b63587bf07889be60"
Feb 02 11:06:26 crc kubenswrapper[4838]: E0202 11:06:26.966015 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"504277124ffff91f0d1a8f660f205c251961a1f33815de5b63587bf07889be60\": container with ID starting with 504277124ffff91f0d1a8f660f205c251961a1f33815de5b63587bf07889be60 not found: ID does not exist" containerID="504277124ffff91f0d1a8f660f205c251961a1f33815de5b63587bf07889be60"
Feb 02 11:06:26 crc kubenswrapper[4838]: I0202 11:06:26.966066 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"504277124ffff91f0d1a8f660f205c251961a1f33815de5b63587bf07889be60"} err="failed to get container status \"504277124ffff91f0d1a8f660f205c251961a1f33815de5b63587bf07889be60\": rpc error: code = NotFound desc = could not find container \"504277124ffff91f0d1a8f660f205c251961a1f33815de5b63587bf07889be60\": container with ID starting with 504277124ffff91f0d1a8f660f205c251961a1f33815de5b63587bf07889be60 not found: ID does not exist"
Feb 02 11:06:28 crc kubenswrapper[4838]: I0202 11:06:28.513592 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="945078d8-08cb-4ce1-b866-26b0f93bd75c" path="/var/lib/kubelet/pods/945078d8-08cb-4ce1-b866-26b0f93bd75c/volumes"
Feb 02 11:06:37 crc kubenswrapper[4838]: I0202 11:06:37.500087 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-b56888666-82h5d"
Feb 02 11:06:45 crc kubenswrapper[4838]: I0202 11:06:45.429854 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 02 11:06:45 crc kubenswrapper[4838]: I0202 11:06:45.430424 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 11:06:45 crc kubenswrapper[4838]: I0202 11:06:45.430480 4838 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv"
Feb 02 11:06:45 crc kubenswrapper[4838]: I0202 11:06:45.431217 4838 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"47f84caf6e841371c2d1b572818b4b359f4d7377669649aa2f737bf7eb7b98db"} pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 02 11:06:45 crc kubenswrapper[4838]: I0202 11:06:45.431293 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" containerID="cri-o://47f84caf6e841371c2d1b572818b4b359f4d7377669649aa2f737bf7eb7b98db" gracePeriod=600
Feb 02 11:06:45 crc kubenswrapper[4838]: I0202 11:06:45.994139 4838 generic.go:334] "Generic (PLEG): container finished" podID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerID="47f84caf6e841371c2d1b572818b4b359f4d7377669649aa2f737bf7eb7b98db" exitCode=0
Feb 02 11:06:45 crc kubenswrapper[4838]: I0202 11:06:45.994188 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" event={"ID":"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce","Type":"ContainerDied","Data":"47f84caf6e841371c2d1b572818b4b359f4d7377669649aa2f737bf7eb7b98db"}
Feb 02 11:06:45 crc kubenswrapper[4838]: I0202 11:06:45.994225 4838 scope.go:117] "RemoveContainer" containerID="9ae800805fcd8b11ea61c3e69aa000f94373b0daa53dc9b9faa0877f99ba8a3a"
Feb 02 11:06:47 crc kubenswrapper[4838]: I0202 11:06:47.001833 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" event={"ID":"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce","Type":"ContainerStarted","Data":"411e6aa6542cc291703765b915acdf4b4838b2ed95b8455f8ee0b804a9cfdae7"}
Feb 02 11:06:56 crc kubenswrapper[4838]: I0202 11:06:56.994594 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-67dbdd759f-klfvs"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.819204 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-ltddp"]
Feb 02 11:06:57 crc kubenswrapper[4838]: E0202 11:06:57.819716 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="945078d8-08cb-4ce1-b866-26b0f93bd75c" containerName="registry-server"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.819733 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="945078d8-08cb-4ce1-b866-26b0f93bd75c" containerName="registry-server"
Feb 02 11:06:57 crc kubenswrapper[4838]: E0202 11:06:57.819746 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="945078d8-08cb-4ce1-b866-26b0f93bd75c" containerName="extract-utilities"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.819756 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="945078d8-08cb-4ce1-b866-26b0f93bd75c" containerName="extract-utilities"
Feb 02 11:06:57 crc kubenswrapper[4838]: E0202 11:06:57.819772 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="945078d8-08cb-4ce1-b866-26b0f93bd75c" containerName="extract-content"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.819778 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="945078d8-08cb-4ce1-b866-26b0f93bd75c" containerName="extract-content"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.819882 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="945078d8-08cb-4ce1-b866-26b0f93bd75c" containerName="registry-server"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.821743 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-ltddp"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.828161 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-czj6m"]
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.828982 4838 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.829048 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-czj6m"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.829134 4838 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-f2rxl"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.829201 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.833974 4838 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.843448 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-czj6m"]
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.880918 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zb62d\" (UniqueName: \"kubernetes.io/projected/80562d51-943d-4213-abbd-099b4e891ce9-kube-api-access-zb62d\") pod \"frr-k8s-webhook-server-7df86c4f6c-czj6m\" (UID: \"80562d51-943d-4213-abbd-099b4e891ce9\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-czj6m"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.880971 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m47tm\" (UniqueName: \"kubernetes.io/projected/b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6-kube-api-access-m47tm\") pod \"frr-k8s-ltddp\" (UID: \"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6\") " pod="metallb-system/frr-k8s-ltddp"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.880989 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6-frr-conf\") pod \"frr-k8s-ltddp\" (UID: \"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6\") " pod="metallb-system/frr-k8s-ltddp"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.881004 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6-frr-startup\") pod \"frr-k8s-ltddp\" (UID: \"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6\") " pod="metallb-system/frr-k8s-ltddp"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.881024 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6-reloader\") pod \"frr-k8s-ltddp\" (UID: \"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6\") " pod="metallb-system/frr-k8s-ltddp"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.881061 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6-metrics-certs\") pod \"frr-k8s-ltddp\" (UID: \"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6\") " pod="metallb-system/frr-k8s-ltddp"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.881090 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6-frr-sockets\") pod \"frr-k8s-ltddp\" (UID: \"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6\") " pod="metallb-system/frr-k8s-ltddp"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.881106 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6-metrics\") pod \"frr-k8s-ltddp\" (UID: \"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6\") " pod="metallb-system/frr-k8s-ltddp"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.881124 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/80562d51-943d-4213-abbd-099b4e891ce9-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-czj6m\" (UID: \"80562d51-943d-4213-abbd-099b4e891ce9\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-czj6m"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.908949 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-pf2fp"]
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.909965 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-pf2fp"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.921014 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6968d8fdc4-7kdtd"]
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.921880 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6968d8fdc4-7kdtd"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.926755 4838 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-hd87k"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.926874 4838 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.926907 4838 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.927103 4838 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.927177 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.932226 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-7kdtd"]
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.981954 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b92f99a9-4bbd-4f01-9b6e-8cb3e0fab6b4-metrics-certs\") pod \"controller-6968d8fdc4-7kdtd\" (UID: \"b92f99a9-4bbd-4f01-9b6e-8cb3e0fab6b4\") " pod="metallb-system/controller-6968d8fdc4-7kdtd"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.982014 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6-frr-sockets\") pod \"frr-k8s-ltddp\" (UID: \"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6\") " pod="metallb-system/frr-k8s-ltddp"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.982254 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6-metrics\") pod \"frr-k8s-ltddp\" (UID: \"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6\") " pod="metallb-system/frr-k8s-ltddp"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.982348 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6-frr-sockets\") pod \"frr-k8s-ltddp\" (UID: \"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6\") " pod="metallb-system/frr-k8s-ltddp"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.982420 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/80562d51-943d-4213-abbd-099b4e891ce9-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-czj6m\" (UID: \"80562d51-943d-4213-abbd-099b4e891ce9\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-czj6m"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.982516 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zb62d\" (UniqueName: \"kubernetes.io/projected/80562d51-943d-4213-abbd-099b4e891ce9-kube-api-access-zb62d\") pod \"frr-k8s-webhook-server-7df86c4f6c-czj6m\" (UID: \"80562d51-943d-4213-abbd-099b4e891ce9\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-czj6m"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.982607 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6zzt\" (UniqueName: \"kubernetes.io/projected/306c1a4f-3d28-4cc9-91bd-a78c25803845-kube-api-access-p6zzt\") pod \"speaker-pf2fp\" (UID: \"306c1a4f-3d28-4cc9-91bd-a78c25803845\") " pod="metallb-system/speaker-pf2fp"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.982688 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6-metrics\") pod \"frr-k8s-ltddp\" (UID: \"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6\") " pod="metallb-system/frr-k8s-ltddp"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.982833 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m47tm\" (UniqueName: \"kubernetes.io/projected/b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6-kube-api-access-m47tm\") pod \"frr-k8s-ltddp\" (UID: \"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6\") " pod="metallb-system/frr-k8s-ltddp"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.982896 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/306c1a4f-3d28-4cc9-91bd-a78c25803845-metallb-excludel2\") pod \"speaker-pf2fp\" (UID: \"306c1a4f-3d28-4cc9-91bd-a78c25803845\") " pod="metallb-system/speaker-pf2fp"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.982927 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6-frr-conf\") pod \"frr-k8s-ltddp\" (UID: \"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6\") " pod="metallb-system/frr-k8s-ltddp"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.982966 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6-frr-startup\") pod \"frr-k8s-ltddp\" (UID: \"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6\") " pod="metallb-system/frr-k8s-ltddp"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.982992 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b92f99a9-4bbd-4f01-9b6e-8cb3e0fab6b4-cert\") pod \"controller-6968d8fdc4-7kdtd\" (UID: \"b92f99a9-4bbd-4f01-9b6e-8cb3e0fab6b4\") " pod="metallb-system/controller-6968d8fdc4-7kdtd"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.983020 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6-reloader\") pod \"frr-k8s-ltddp\" (UID: \"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6\") " pod="metallb-system/frr-k8s-ltddp"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.983057 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/306c1a4f-3d28-4cc9-91bd-a78c25803845-metrics-certs\") pod \"speaker-pf2fp\" (UID: \"306c1a4f-3d28-4cc9-91bd-a78c25803845\") " pod="metallb-system/speaker-pf2fp"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.983087 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5s7q8\" (UniqueName: \"kubernetes.io/projected/b92f99a9-4bbd-4f01-9b6e-8cb3e0fab6b4-kube-api-access-5s7q8\") pod \"controller-6968d8fdc4-7kdtd\" (UID: \"b92f99a9-4bbd-4f01-9b6e-8cb3e0fab6b4\") " pod="metallb-system/controller-6968d8fdc4-7kdtd"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.983125 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/306c1a4f-3d28-4cc9-91bd-a78c25803845-memberlist\") pod \"speaker-pf2fp\" (UID: \"306c1a4f-3d28-4cc9-91bd-a78c25803845\") " pod="metallb-system/speaker-pf2fp"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.983191 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6-metrics-certs\") pod \"frr-k8s-ltddp\" (UID: \"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6\") " pod="metallb-system/frr-k8s-ltddp"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.983207 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6-frr-conf\") pod \"frr-k8s-ltddp\" (UID: \"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6\") " pod="metallb-system/frr-k8s-ltddp"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.983955 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6-reloader\") pod \"frr-k8s-ltddp\" (UID: \"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6\") " pod="metallb-system/frr-k8s-ltddp"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.984022 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6-frr-startup\") pod \"frr-k8s-ltddp\" (UID: \"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6\") " pod="metallb-system/frr-k8s-ltddp"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.988044 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6-metrics-certs\") pod \"frr-k8s-ltddp\" (UID: \"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6\") " pod="metallb-system/frr-k8s-ltddp"
Feb 02 11:06:57 crc kubenswrapper[4838]: I0202 11:06:57.991187 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/80562d51-943d-4213-abbd-099b4e891ce9-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-czj6m\" (UID: \"80562d51-943d-4213-abbd-099b4e891ce9\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-czj6m"
Feb 02 11:06:58 crc kubenswrapper[4838]: I0202 11:06:58.000092 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m47tm\" (UniqueName: \"kubernetes.io/projected/b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6-kube-api-access-m47tm\") pod \"frr-k8s-ltddp\" (UID: \"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6\") " pod="metallb-system/frr-k8s-ltddp"
Feb 02 11:06:58 crc kubenswrapper[4838]: I0202 11:06:58.002468 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zb62d\" (UniqueName: \"kubernetes.io/projected/80562d51-943d-4213-abbd-099b4e891ce9-kube-api-access-zb62d\") pod \"frr-k8s-webhook-server-7df86c4f6c-czj6m\" (UID: \"80562d51-943d-4213-abbd-099b4e891ce9\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-czj6m"
Feb 02 11:06:58 crc kubenswrapper[4838]: I0202 11:06:58.084478 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5s7q8\" (UniqueName: \"kubernetes.io/projected/b92f99a9-4bbd-4f01-9b6e-8cb3e0fab6b4-kube-api-access-5s7q8\") pod \"controller-6968d8fdc4-7kdtd\" (UID: \"b92f99a9-4bbd-4f01-9b6e-8cb3e0fab6b4\") " pod="metallb-system/controller-6968d8fdc4-7kdtd"
Feb 02 11:06:58 crc kubenswrapper[4838]: I0202 11:06:58.084525 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/306c1a4f-3d28-4cc9-91bd-a78c25803845-memberlist\") pod \"speaker-pf2fp\" (UID: \"306c1a4f-3d28-4cc9-91bd-a78c25803845\") " pod="metallb-system/speaker-pf2fp"
Feb 02 11:06:58 crc kubenswrapper[4838]: I0202 11:06:58.084559 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b92f99a9-4bbd-4f01-9b6e-8cb3e0fab6b4-metrics-certs\") pod \"controller-6968d8fdc4-7kdtd\" (UID: \"b92f99a9-4bbd-4f01-9b6e-8cb3e0fab6b4\") " pod="metallb-system/controller-6968d8fdc4-7kdtd"
Feb 02 11:06:58 crc kubenswrapper[4838]: I0202 11:06:58.084597 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6zzt\" (UniqueName: \"kubernetes.io/projected/306c1a4f-3d28-4cc9-91bd-a78c25803845-kube-api-access-p6zzt\") pod \"speaker-pf2fp\" (UID: \"306c1a4f-3d28-4cc9-91bd-a78c25803845\") " pod="metallb-system/speaker-pf2fp"
Feb 02 11:06:58 crc kubenswrapper[4838]: I0202 11:06:58.084643 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/306c1a4f-3d28-4cc9-91bd-a78c25803845-metallb-excludel2\") pod \"speaker-pf2fp\" (UID: \"306c1a4f-3d28-4cc9-91bd-a78c25803845\") " pod="metallb-system/speaker-pf2fp"
Feb 02 11:06:58 crc kubenswrapper[4838]: I0202 11:06:58.084663 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b92f99a9-4bbd-4f01-9b6e-8cb3e0fab6b4-cert\") pod \"controller-6968d8fdc4-7kdtd\" (UID: \"b92f99a9-4bbd-4f01-9b6e-8cb3e0fab6b4\") " pod="metallb-system/controller-6968d8fdc4-7kdtd"
Feb 02 11:06:58 crc kubenswrapper[4838]: I0202 11:06:58.084682 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/306c1a4f-3d28-4cc9-91bd-a78c25803845-metrics-certs\") pod \"speaker-pf2fp\" (UID: \"306c1a4f-3d28-4cc9-91bd-a78c25803845\") " pod="metallb-system/speaker-pf2fp"
Feb 02 11:06:58 crc kubenswrapper[4838]: E0202 11:06:58.084910 4838 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Feb 02 11:06:58 crc kubenswrapper[4838]: E0202 11:06:58.085000 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/306c1a4f-3d28-4cc9-91bd-a78c25803845-memberlist podName:306c1a4f-3d28-4cc9-91bd-a78c25803845 nodeName:}" failed. No retries permitted until 2026-02-02 11:06:58.584973369 +0000 UTC m=+812.922074407 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/306c1a4f-3d28-4cc9-91bd-a78c25803845-memberlist") pod "speaker-pf2fp" (UID: "306c1a4f-3d28-4cc9-91bd-a78c25803845") : secret "metallb-memberlist" not found
Feb 02 11:06:58 crc kubenswrapper[4838]: I0202 11:06:58.085495 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/306c1a4f-3d28-4cc9-91bd-a78c25803845-metallb-excludel2\") pod \"speaker-pf2fp\" (UID: \"306c1a4f-3d28-4cc9-91bd-a78c25803845\") " pod="metallb-system/speaker-pf2fp"
Feb 02 11:06:58 crc kubenswrapper[4838]: I0202 11:06:58.087412 4838 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Feb 02 11:06:58 crc kubenswrapper[4838]: I0202 11:06:58.087837 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/306c1a4f-3d28-4cc9-91bd-a78c25803845-metrics-certs\") pod \"speaker-pf2fp\" (UID: \"306c1a4f-3d28-4cc9-91bd-a78c25803845\") " pod="metallb-system/speaker-pf2fp"
Feb 02 11:06:58 crc kubenswrapper[4838]: I0202 11:06:58.088265 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b92f99a9-4bbd-4f01-9b6e-8cb3e0fab6b4-metrics-certs\") pod \"controller-6968d8fdc4-7kdtd\" (UID: \"b92f99a9-4bbd-4f01-9b6e-8cb3e0fab6b4\") " pod="metallb-system/controller-6968d8fdc4-7kdtd"
Feb 02 11:06:58 crc kubenswrapper[4838]: I0202 11:06:58.104674 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b92f99a9-4bbd-4f01-9b6e-8cb3e0fab6b4-cert\") pod \"controller-6968d8fdc4-7kdtd\" (UID: \"b92f99a9-4bbd-4f01-9b6e-8cb3e0fab6b4\") " pod="metallb-system/controller-6968d8fdc4-7kdtd"
Feb 02 11:06:58 crc kubenswrapper[4838]: I0202 11:06:58.107152 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6zzt\" (UniqueName: \"kubernetes.io/projected/306c1a4f-3d28-4cc9-91bd-a78c25803845-kube-api-access-p6zzt\") pod \"speaker-pf2fp\" (UID: \"306c1a4f-3d28-4cc9-91bd-a78c25803845\") " pod="metallb-system/speaker-pf2fp"
Feb 02 11:06:58 crc kubenswrapper[4838]: I0202 11:06:58.116434 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5s7q8\" (UniqueName: \"kubernetes.io/projected/b92f99a9-4bbd-4f01-9b6e-8cb3e0fab6b4-kube-api-access-5s7q8\") pod \"controller-6968d8fdc4-7kdtd\" (UID: \"b92f99a9-4bbd-4f01-9b6e-8cb3e0fab6b4\") " pod="metallb-system/controller-6968d8fdc4-7kdtd"
Feb 02 11:06:58 crc kubenswrapper[4838]: I0202 11:06:58.151341 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-ltddp"
Feb 02 11:06:58 crc kubenswrapper[4838]: I0202 11:06:58.167508 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-czj6m"
Feb 02 11:06:58 crc kubenswrapper[4838]: I0202 11:06:58.241710 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6968d8fdc4-7kdtd"
Feb 02 11:06:58 crc kubenswrapper[4838]: I0202 11:06:58.564884 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-7kdtd"]
Feb 02 11:06:58 crc kubenswrapper[4838]: I0202 11:06:58.593184 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/306c1a4f-3d28-4cc9-91bd-a78c25803845-memberlist\") pod \"speaker-pf2fp\" (UID: \"306c1a4f-3d28-4cc9-91bd-a78c25803845\") " pod="metallb-system/speaker-pf2fp"
Feb 02 11:06:58 crc kubenswrapper[4838]: E0202 11:06:58.593791 4838 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Feb 02 11:06:58 crc kubenswrapper[4838]: E0202 11:06:58.594093 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/306c1a4f-3d28-4cc9-91bd-a78c25803845-memberlist podName:306c1a4f-3d28-4cc9-91bd-a78c25803845 nodeName:}" failed. No retries permitted until 2026-02-02 11:06:59.593884572 +0000 UTC m=+813.930985610 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/306c1a4f-3d28-4cc9-91bd-a78c25803845-memberlist") pod "speaker-pf2fp" (UID: "306c1a4f-3d28-4cc9-91bd-a78c25803845") : secret "metallb-memberlist" not found
Feb 02 11:06:58 crc kubenswrapper[4838]: I0202 11:06:58.746107 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-czj6m"]
Feb 02 11:06:58 crc kubenswrapper[4838]: W0202 11:06:58.754541 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod80562d51_943d_4213_abbd_099b4e891ce9.slice/crio-fec2cd9d98ffeaab730872b854f9facc38370a665a9f337b08145500debc8156 WatchSource:0}: Error finding container fec2cd9d98ffeaab730872b854f9facc38370a665a9f337b08145500debc8156: Status 404 returned error can't find the container with id fec2cd9d98ffeaab730872b854f9facc38370a665a9f337b08145500debc8156
Feb 02 11:06:59 crc kubenswrapper[4838]: I0202 11:06:59.083403 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-ltddp" event={"ID":"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6","Type":"ContainerStarted","Data":"e44a8f4f4dde3ea5d245e1fa9d6f8afcf958a262abdd6755d01953c0b64d3db2"}
Feb 02 11:06:59 crc kubenswrapper[4838]: I0202 11:06:59.088428 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-7kdtd" event={"ID":"b92f99a9-4bbd-4f01-9b6e-8cb3e0fab6b4","Type":"ContainerStarted","Data":"c46118411bc80c2414cf2e496eb99bef3399cb37c6bb80fe805f17c758b5984b"}
Feb 02 11:06:59 crc kubenswrapper[4838]: I0202 11:06:59.088490 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-7kdtd" event={"ID":"b92f99a9-4bbd-4f01-9b6e-8cb3e0fab6b4","Type":"ContainerStarted","Data":"aca70dfa44c7e0949fa1984ad725cfbc4ab9574a24031588f50659633126ffa8"}
Feb 02 11:06:59 crc kubenswrapper[4838]: I0202 11:06:59.088510 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-7kdtd" event={"ID":"b92f99a9-4bbd-4f01-9b6e-8cb3e0fab6b4","Type":"ContainerStarted","Data":"d9f2990bd4e4f0eb96b8239d0a3a2bfb6721dd2601b5b556dc545ec59e49b528"}
Feb 02 11:06:59 crc kubenswrapper[4838]: I0202 11:06:59.088643 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6968d8fdc4-7kdtd"
Feb 02 11:06:59 crc kubenswrapper[4838]: I0202 11:06:59.091485 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-czj6m" event={"ID":"80562d51-943d-4213-abbd-099b4e891ce9","Type":"ContainerStarted","Data":"fec2cd9d98ffeaab730872b854f9facc38370a665a9f337b08145500debc8156"} Feb 02 11:06:59 crc kubenswrapper[4838]: I0202 11:06:59.118941 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6968d8fdc4-7kdtd" podStartSLOduration=2.118916308 podStartE2EDuration="2.118916308s" podCreationTimestamp="2026-02-02 11:06:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:06:59.106977414 +0000 UTC m=+813.444078522" watchObservedRunningTime="2026-02-02 11:06:59.118916308 +0000 UTC m=+813.456017396" Feb 02 11:06:59 crc kubenswrapper[4838]: I0202 11:06:59.607046 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/306c1a4f-3d28-4cc9-91bd-a78c25803845-memberlist\") pod \"speaker-pf2fp\" (UID: \"306c1a4f-3d28-4cc9-91bd-a78c25803845\") " pod="metallb-system/speaker-pf2fp" Feb 02 11:06:59 crc kubenswrapper[4838]: I0202 11:06:59.613769 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/306c1a4f-3d28-4cc9-91bd-a78c25803845-memberlist\") pod \"speaker-pf2fp\" (UID: \"306c1a4f-3d28-4cc9-91bd-a78c25803845\") " pod="metallb-system/speaker-pf2fp" Feb 02 11:06:59 crc kubenswrapper[4838]: I0202 11:06:59.727150 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-pf2fp" Feb 02 11:06:59 crc kubenswrapper[4838]: W0202 11:06:59.748563 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod306c1a4f_3d28_4cc9_91bd_a78c25803845.slice/crio-0b0ad0e22c8fef74a584a1aa80f231e2a50bc712d85c880abba91d7c8e66614a WatchSource:0}: Error finding container 0b0ad0e22c8fef74a584a1aa80f231e2a50bc712d85c880abba91d7c8e66614a: Status 404 returned error can't find the container with id 0b0ad0e22c8fef74a584a1aa80f231e2a50bc712d85c880abba91d7c8e66614a Feb 02 11:07:00 crc kubenswrapper[4838]: I0202 11:07:00.101204 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-pf2fp" event={"ID":"306c1a4f-3d28-4cc9-91bd-a78c25803845","Type":"ContainerStarted","Data":"fb95740e915e98e84a91f57c4c9d9d9b4f09c1a5eca1bd5d4222be2e951421a1"} Feb 02 11:07:00 crc kubenswrapper[4838]: I0202 11:07:00.101254 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-pf2fp" event={"ID":"306c1a4f-3d28-4cc9-91bd-a78c25803845","Type":"ContainerStarted","Data":"0b0ad0e22c8fef74a584a1aa80f231e2a50bc712d85c880abba91d7c8e66614a"} Feb 02 11:07:01 crc kubenswrapper[4838]: I0202 11:07:01.119284 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-pf2fp" event={"ID":"306c1a4f-3d28-4cc9-91bd-a78c25803845","Type":"ContainerStarted","Data":"90a0e5265c22066ee48ec0dae5d1ec7158aa418c85bfae9609149174b07dd846"} Feb 02 11:07:01 crc kubenswrapper[4838]: I0202 11:07:01.119489 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-pf2fp" Feb 02 11:07:01 crc kubenswrapper[4838]: I0202 11:07:01.134294 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="metallb-system/speaker-pf2fp" podStartSLOduration=4.134274354 podStartE2EDuration="4.134274354s" podCreationTimestamp="2026-02-02 11:06:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:07:01.132063836 +0000 UTC m=+815.469164864" watchObservedRunningTime="2026-02-02 11:07:01.134274354 +0000 UTC m=+815.471375382" Feb 02 11:07:07 crc kubenswrapper[4838]: I0202 11:07:07.176124 4838 generic.go:334] "Generic (PLEG): container finished" podID="b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6" containerID="479de638e12559cdb5aad3e04b7b27c929d224cefe882066306791b08a8c6a00" exitCode=0 Feb 02 11:07:07 crc kubenswrapper[4838]: I0202 11:07:07.176255 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-ltddp" event={"ID":"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6","Type":"ContainerDied","Data":"479de638e12559cdb5aad3e04b7b27c929d224cefe882066306791b08a8c6a00"} Feb 02 11:07:07 crc kubenswrapper[4838]: I0202 11:07:07.181129 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-czj6m" event={"ID":"80562d51-943d-4213-abbd-099b4e891ce9","Type":"ContainerStarted","Data":"f0281d144485942f3809abb1d46bb657074d0004ea67a454ede2310e0d4b494a"} Feb 02 11:07:07 crc kubenswrapper[4838]: I0202 11:07:07.181358 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-czj6m" Feb 02 11:07:07 crc kubenswrapper[4838]: I0202 11:07:07.243419 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-czj6m" podStartSLOduration=2.702661096 podStartE2EDuration="10.243402163s" podCreationTimestamp="2026-02-02 11:06:57 +0000 UTC" firstStartedPulling="2026-02-02 11:06:58.757300973 +0000 UTC m=+813.094402001" lastFinishedPulling="2026-02-02 11:07:06.29804202 +0000 UTC m=+820.635143068" observedRunningTime="2026-02-02 11:07:07.24291467 +0000 UTC m=+821.580015708" watchObservedRunningTime="2026-02-02 11:07:07.243402163 +0000 UTC m=+821.580503191" Feb 02 11:07:08 crc kubenswrapper[4838]: I0202 11:07:08.189530 4838 generic.go:334] "Generic (PLEG): container finished" podID="b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6" containerID="51ec24257e06998e43dbafc1e02dcb078cf91d82337b8dfd705d1b74f17e62dc" exitCode=0 Feb 02 11:07:08 crc kubenswrapper[4838]: I0202 11:07:08.189677 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-ltddp" event={"ID":"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6","Type":"ContainerDied","Data":"51ec24257e06998e43dbafc1e02dcb078cf91d82337b8dfd705d1b74f17e62dc"} Feb 02 11:07:08 crc kubenswrapper[4838]: I0202 11:07:08.246048 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6968d8fdc4-7kdtd" Feb 02 11:07:09 crc kubenswrapper[4838]: I0202 11:07:09.198235 4838 generic.go:334] "Generic (PLEG): container finished" podID="b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6" containerID="8375e6a69fe4ea4d2b690d17509c137970fe94cbabc129ecd7899d31f20445de" exitCode=0 Feb 02 11:07:09 crc kubenswrapper[4838]: I0202 11:07:09.198534 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-ltddp" event={"ID":"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6","Type":"ContainerDied","Data":"8375e6a69fe4ea4d2b690d17509c137970fe94cbabc129ecd7899d31f20445de"} Feb 02 11:07:10 crc kubenswrapper[4838]: I0202 11:07:10.214238 4838 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="metallb-system/frr-k8s-ltddp" event={"ID":"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6","Type":"ContainerStarted","Data":"1af5841e7a2a2e73e7b08e86b50f80a23e0716ff6980d464885f919bf1c740ff"} Feb 02 11:07:10 crc kubenswrapper[4838]: I0202 11:07:10.214636 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-ltddp" event={"ID":"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6","Type":"ContainerStarted","Data":"0ff35f2ac41e8ecaaacc6a6420611e892366e33c9dc32d158817c143e389eb4f"} Feb 02 11:07:10 crc kubenswrapper[4838]: I0202 11:07:10.214650 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-ltddp" event={"ID":"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6","Type":"ContainerStarted","Data":"a2ceb3481b5a89c9dce784530aafc277726fb374bc8be676ad2c5247089aa0d4"} Feb 02 11:07:12 crc kubenswrapper[4838]: I0202 11:07:12.232880 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-ltddp" event={"ID":"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6","Type":"ContainerStarted","Data":"f4fe4d1831d13e63915b160bb07041238cc12083339a7c445ddd0fed297b6792"} Feb 02 11:07:12 crc kubenswrapper[4838]: I0202 11:07:12.233159 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-ltddp" event={"ID":"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6","Type":"ContainerStarted","Data":"abf1047e0b7778ccd5d58f6c23f65ac8c50e5ef9abefc8f098e9b1520aca3daa"} Feb 02 11:07:13 crc kubenswrapper[4838]: I0202 11:07:13.243664 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-ltddp" event={"ID":"b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6","Type":"ContainerStarted","Data":"b230630a5f535a2e4f466eb088a31697870b93fccbfd78735b9df8965b26161a"} Feb 02 11:07:13 crc kubenswrapper[4838]: I0202 11:07:13.244038 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-ltddp" Feb 02 11:07:13 crc kubenswrapper[4838]: I0202 11:07:13.276551 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-ltddp" podStartSLOduration=8.385658494 podStartE2EDuration="16.276533185s" podCreationTimestamp="2026-02-02 11:06:57 +0000 UTC" firstStartedPulling="2026-02-02 11:06:58.361386427 +0000 UTC m=+812.698487455" lastFinishedPulling="2026-02-02 11:07:06.252261108 +0000 UTC m=+820.589362146" observedRunningTime="2026-02-02 11:07:13.269595853 +0000 UTC m=+827.606696911" watchObservedRunningTime="2026-02-02 11:07:13.276533185 +0000 UTC m=+827.613634213" Feb 02 11:07:18 crc kubenswrapper[4838]: I0202 11:07:18.152739 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-ltddp" Feb 02 11:07:18 crc kubenswrapper[4838]: I0202 11:07:18.173727 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-czj6m" Feb 02 11:07:18 crc kubenswrapper[4838]: I0202 11:07:18.195141 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-ltddp" Feb 02 11:07:19 crc kubenswrapper[4838]: I0202 11:07:19.729642 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-pf2fp" Feb 02 11:07:22 crc kubenswrapper[4838]: I0202 11:07:22.530032 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-fz5mx"] Feb 02 11:07:22 crc kubenswrapper[4838]: I0202 11:07:22.531043 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-fz5mx" Feb 02 11:07:22 crc kubenswrapper[4838]: I0202 11:07:22.532854 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-b4xbw" Feb 02 11:07:22 crc kubenswrapper[4838]: I0202 11:07:22.534566 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Feb 02 11:07:22 crc kubenswrapper[4838]: I0202 11:07:22.534991 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Feb 02 11:07:22 crc kubenswrapper[4838]: I0202 11:07:22.554760 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-fz5mx"] Feb 02 11:07:22 crc kubenswrapper[4838]: I0202 11:07:22.572564 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6glqn\" (UniqueName: \"kubernetes.io/projected/3994b6ed-2b8c-4519-bb0c-a87c36d8cd7b-kube-api-access-6glqn\") pod \"openstack-operator-index-fz5mx\" (UID: \"3994b6ed-2b8c-4519-bb0c-a87c36d8cd7b\") " pod="openstack-operators/openstack-operator-index-fz5mx" Feb 02 11:07:22 crc kubenswrapper[4838]: I0202 11:07:22.673581 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6glqn\" (UniqueName: \"kubernetes.io/projected/3994b6ed-2b8c-4519-bb0c-a87c36d8cd7b-kube-api-access-6glqn\") pod \"openstack-operator-index-fz5mx\" (UID: \"3994b6ed-2b8c-4519-bb0c-a87c36d8cd7b\") " pod="openstack-operators/openstack-operator-index-fz5mx" Feb 02 11:07:22 crc kubenswrapper[4838]: I0202 11:07:22.694707 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6glqn\" (UniqueName: \"kubernetes.io/projected/3994b6ed-2b8c-4519-bb0c-a87c36d8cd7b-kube-api-access-6glqn\") pod \"openstack-operator-index-fz5mx\" (UID: \"3994b6ed-2b8c-4519-bb0c-a87c36d8cd7b\") " pod="openstack-operators/openstack-operator-index-fz5mx" Feb 02 11:07:22 crc kubenswrapper[4838]: I0202 11:07:22.864830 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-fz5mx" Feb 02 11:07:23 crc kubenswrapper[4838]: I0202 11:07:23.264601 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-fz5mx"] Feb 02 11:07:23 crc kubenswrapper[4838]: W0202 11:07:23.271914 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3994b6ed_2b8c_4519_bb0c_a87c36d8cd7b.slice/crio-ac84a6c43e8ad8489df88f57f0d23619f2fca05672a74baaee11473072ad272d WatchSource:0}: Error finding container ac84a6c43e8ad8489df88f57f0d23619f2fca05672a74baaee11473072ad272d: Status 404 returned error can't find the container with id ac84a6c43e8ad8489df88f57f0d23619f2fca05672a74baaee11473072ad272d Feb 02 11:07:23 crc kubenswrapper[4838]: I0202 11:07:23.312299 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-fz5mx" event={"ID":"3994b6ed-2b8c-4519-bb0c-a87c36d8cd7b","Type":"ContainerStarted","Data":"ac84a6c43e8ad8489df88f57f0d23619f2fca05672a74baaee11473072ad272d"} Feb 02 11:07:25 crc kubenswrapper[4838]: I0202 11:07:25.900185 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-fz5mx"] Feb 02 11:07:26 crc kubenswrapper[4838]: I0202 11:07:26.515882 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-svvg4"] Feb 02 11:07:26 crc kubenswrapper[4838]: I0202 11:07:26.516534 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-svvg4" Feb 02 11:07:26 crc kubenswrapper[4838]: I0202 11:07:26.517328 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-svvg4"] Feb 02 11:07:26 crc kubenswrapper[4838]: I0202 11:07:26.658008 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfmlh\" (UniqueName: \"kubernetes.io/projected/11d5435a-6771-4a54-b3f1-1f4f6bd2c123-kube-api-access-nfmlh\") pod \"openstack-operator-index-svvg4\" (UID: \"11d5435a-6771-4a54-b3f1-1f4f6bd2c123\") " pod="openstack-operators/openstack-operator-index-svvg4" Feb 02 11:07:26 crc kubenswrapper[4838]: I0202 11:07:26.759588 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfmlh\" (UniqueName: \"kubernetes.io/projected/11d5435a-6771-4a54-b3f1-1f4f6bd2c123-kube-api-access-nfmlh\") pod \"openstack-operator-index-svvg4\" (UID: \"11d5435a-6771-4a54-b3f1-1f4f6bd2c123\") " pod="openstack-operators/openstack-operator-index-svvg4" Feb 02 11:07:26 crc kubenswrapper[4838]: I0202 11:07:26.782582 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfmlh\" (UniqueName: \"kubernetes.io/projected/11d5435a-6771-4a54-b3f1-1f4f6bd2c123-kube-api-access-nfmlh\") pod \"openstack-operator-index-svvg4\" (UID: \"11d5435a-6771-4a54-b3f1-1f4f6bd2c123\") " pod="openstack-operators/openstack-operator-index-svvg4" Feb 02 11:07:26 crc kubenswrapper[4838]: I0202 11:07:26.882517 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-svvg4" Feb 02 11:07:27 crc kubenswrapper[4838]: I0202 11:07:27.397811 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-svvg4"] Feb 02 11:07:28 crc kubenswrapper[4838]: I0202 11:07:28.155305 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-ltddp" Feb 02 11:07:28 crc kubenswrapper[4838]: I0202 11:07:28.349666 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-fz5mx" event={"ID":"3994b6ed-2b8c-4519-bb0c-a87c36d8cd7b","Type":"ContainerStarted","Data":"0dc0ad8835559ff17e87cac429c120a210b5150e32ffc67d5cf7adedbc7e4610"} Feb 02 11:07:28 crc kubenswrapper[4838]: I0202 11:07:28.349748 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-fz5mx" podUID="3994b6ed-2b8c-4519-bb0c-a87c36d8cd7b" containerName="registry-server" containerID="cri-o://0dc0ad8835559ff17e87cac429c120a210b5150e32ffc67d5cf7adedbc7e4610" gracePeriod=2 Feb 02 11:07:28 crc kubenswrapper[4838]: I0202 11:07:28.351816 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-svvg4" event={"ID":"11d5435a-6771-4a54-b3f1-1f4f6bd2c123","Type":"ContainerStarted","Data":"d882a8fd9bd520cc3992f47414d2c5bfd6deeb0d816ffd45c51c0a93ebc254ea"} Feb 02 11:07:28 crc kubenswrapper[4838]: I0202 11:07:28.351929 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-svvg4" event={"ID":"11d5435a-6771-4a54-b3f1-1f4f6bd2c123","Type":"ContainerStarted","Data":"72a6c7080afd0529f16c4ee32535b9b17bed38cf579ea8fd009ba679cf7eddca"} Feb 02 11:07:28 crc kubenswrapper[4838]: I0202 11:07:28.375468 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-fz5mx" podStartSLOduration=2.350895306 podStartE2EDuration="6.375453349s" podCreationTimestamp="2026-02-02 11:07:22 +0000 UTC" firstStartedPulling="2026-02-02 11:07:23.27479922 +0000 UTC m=+837.611900248" lastFinishedPulling="2026-02-02 11:07:27.299357263 +0000 UTC m=+841.636458291" observedRunningTime="2026-02-02 11:07:28.365262291 +0000 UTC m=+842.702363329" watchObservedRunningTime="2026-02-02 11:07:28.375453349 +0000 UTC m=+842.712554377" Feb 02 11:07:28 crc kubenswrapper[4838]: I0202 11:07:28.391970 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-svvg4" podStartSLOduration=2.318059972 podStartE2EDuration="2.391912171s" podCreationTimestamp="2026-02-02 11:07:26 +0000 UTC" firstStartedPulling="2026-02-02 11:07:27.407697048 +0000 UTC m=+841.744798076" lastFinishedPulling="2026-02-02 11:07:27.481549247 +0000 UTC m=+841.818650275" observedRunningTime="2026-02-02 11:07:28.385832371 +0000 UTC m=+842.722933399" watchObservedRunningTime="2026-02-02 11:07:28.391912171 +0000 UTC m=+842.729013209" Feb 02 11:07:29 crc kubenswrapper[4838]: I0202 11:07:29.307137 4838 util.go:48] "No ready sandbox for pod can be found. 
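From here the log interleaves two catalog pods: openstack-operator-index-fz5mx, deleted via the API at 11:07:25 and killed with a 2s grace period, and its replacement openstack-operator-index-svvg4. Interleaved lifecycles like this are easier to follow one pod at a time; a small hypothetical Go filter that pulls the "SyncLoop (PLEG)" events for a single pod out of a kubelet log, assuming exactly the quoting used in these entries:

package main

import (
	"bufio"
	"fmt"
	"os"
	"strings"
)

func main() {
	// usage: pleg-filter <namespace/pod-name> < kubelet.log
	pod := os.Args[1]
	needle := `"SyncLoop (PLEG): event for pod" pod="` + pod + `"`
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 1<<20), 1<<20) // some entries are long
	for sc.Scan() {
		if line := sc.Text(); strings.Contains(line, needle) {
			fmt.Println(line)
		}
	}
}

For example, `zcat kubelet.log.gz | pleg-filter openstack-operators/openstack-operator-index-fz5mx` prints just the ContainerStarted/ContainerDied pairs for the deleted pod.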
Feb 02 11:07:29 crc kubenswrapper[4838]: I0202 11:07:29.358542 4838 generic.go:334] "Generic (PLEG): container finished" podID="3994b6ed-2b8c-4519-bb0c-a87c36d8cd7b" containerID="0dc0ad8835559ff17e87cac429c120a210b5150e32ffc67d5cf7adedbc7e4610" exitCode=0
Feb 02 11:07:29 crc kubenswrapper[4838]: I0202 11:07:29.358639 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-fz5mx"
Feb 02 11:07:29 crc kubenswrapper[4838]: I0202 11:07:29.358641 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-fz5mx" event={"ID":"3994b6ed-2b8c-4519-bb0c-a87c36d8cd7b","Type":"ContainerDied","Data":"0dc0ad8835559ff17e87cac429c120a210b5150e32ffc67d5cf7adedbc7e4610"}
Feb 02 11:07:29 crc kubenswrapper[4838]: I0202 11:07:29.358728 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-fz5mx" event={"ID":"3994b6ed-2b8c-4519-bb0c-a87c36d8cd7b","Type":"ContainerDied","Data":"ac84a6c43e8ad8489df88f57f0d23619f2fca05672a74baaee11473072ad272d"}
Feb 02 11:07:29 crc kubenswrapper[4838]: I0202 11:07:29.358758 4838 scope.go:117] "RemoveContainer" containerID="0dc0ad8835559ff17e87cac429c120a210b5150e32ffc67d5cf7adedbc7e4610"
Feb 02 11:07:29 crc kubenswrapper[4838]: I0202 11:07:29.379502 4838 scope.go:117] "RemoveContainer" containerID="0dc0ad8835559ff17e87cac429c120a210b5150e32ffc67d5cf7adedbc7e4610"
Feb 02 11:07:29 crc kubenswrapper[4838]: E0202 11:07:29.380039 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0dc0ad8835559ff17e87cac429c120a210b5150e32ffc67d5cf7adedbc7e4610\": container with ID starting with 0dc0ad8835559ff17e87cac429c120a210b5150e32ffc67d5cf7adedbc7e4610 not found: ID does not exist" containerID="0dc0ad8835559ff17e87cac429c120a210b5150e32ffc67d5cf7adedbc7e4610"
Feb 02 11:07:29 crc kubenswrapper[4838]: I0202 11:07:29.380089 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0dc0ad8835559ff17e87cac429c120a210b5150e32ffc67d5cf7adedbc7e4610"} err="failed to get container status \"0dc0ad8835559ff17e87cac429c120a210b5150e32ffc67d5cf7adedbc7e4610\": rpc error: code = NotFound desc = could not find container \"0dc0ad8835559ff17e87cac429c120a210b5150e32ffc67d5cf7adedbc7e4610\": container with ID starting with 0dc0ad8835559ff17e87cac429c120a210b5150e32ffc67d5cf7adedbc7e4610 not found: ID does not exist"
Feb 02 11:07:29 crc kubenswrapper[4838]: I0202 11:07:29.392803 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6glqn\" (UniqueName: \"kubernetes.io/projected/3994b6ed-2b8c-4519-bb0c-a87c36d8cd7b-kube-api-access-6glqn\") pod \"3994b6ed-2b8c-4519-bb0c-a87c36d8cd7b\" (UID: \"3994b6ed-2b8c-4519-bb0c-a87c36d8cd7b\") "
Feb 02 11:07:29 crc kubenswrapper[4838]: I0202 11:07:29.398977 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3994b6ed-2b8c-4519-bb0c-a87c36d8cd7b-kube-api-access-6glqn" (OuterVolumeSpecName: "kube-api-access-6glqn") pod "3994b6ed-2b8c-4519-bb0c-a87c36d8cd7b" (UID: "3994b6ed-2b8c-4519-bb0c-a87c36d8cd7b"). InnerVolumeSpecName "kube-api-access-6glqn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:07:29 crc kubenswrapper[4838]: I0202 11:07:29.494649 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6glqn\" (UniqueName: \"kubernetes.io/projected/3994b6ed-2b8c-4519-bb0c-a87c36d8cd7b-kube-api-access-6glqn\") on node \"crc\" DevicePath \"\""
Feb 02 11:07:29 crc kubenswrapper[4838]: I0202 11:07:29.693796 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-fz5mx"]
Feb 02 11:07:29 crc kubenswrapper[4838]: I0202 11:07:29.695088 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-fz5mx"]
Feb 02 11:07:30 crc kubenswrapper[4838]: I0202 11:07:30.513652 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3994b6ed-2b8c-4519-bb0c-a87c36d8cd7b" path="/var/lib/kubelet/pods/3994b6ed-2b8c-4519-bb0c-a87c36d8cd7b/volumes"
Feb 02 11:07:36 crc kubenswrapper[4838]: I0202 11:07:36.883246 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-svvg4"
Feb 02 11:07:36 crc kubenswrapper[4838]: I0202 11:07:36.883898 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-svvg4"
Feb 02 11:07:36 crc kubenswrapper[4838]: I0202 11:07:36.925138 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-svvg4"
Feb 02 11:07:37 crc kubenswrapper[4838]: I0202 11:07:37.443262 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-svvg4"
Feb 02 11:07:42 crc kubenswrapper[4838]: I0202 11:07:42.638045 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97"]
Feb 02 11:07:42 crc kubenswrapper[4838]: E0202 11:07:42.638801 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3994b6ed-2b8c-4519-bb0c-a87c36d8cd7b" containerName="registry-server"
Feb 02 11:07:42 crc kubenswrapper[4838]: I0202 11:07:42.638816 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="3994b6ed-2b8c-4519-bb0c-a87c36d8cd7b" containerName="registry-server"
Feb 02 11:07:42 crc kubenswrapper[4838]: I0202 11:07:42.638954 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="3994b6ed-2b8c-4519-bb0c-a87c36d8cd7b" containerName="registry-server"
Feb 02 11:07:42 crc kubenswrapper[4838]: I0202 11:07:42.640019 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97"
Need to start a new one" pod="openstack-operators/07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97" Feb 02 11:07:42 crc kubenswrapper[4838]: I0202 11:07:42.645077 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-qntmq" Feb 02 11:07:42 crc kubenswrapper[4838]: I0202 11:07:42.650272 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97"] Feb 02 11:07:42 crc kubenswrapper[4838]: I0202 11:07:42.789772 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a0ffc923-92b7-4528-963f-bb993ecb20c1-util\") pod \"07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97\" (UID: \"a0ffc923-92b7-4528-963f-bb993ecb20c1\") " pod="openstack-operators/07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97" Feb 02 11:07:42 crc kubenswrapper[4838]: I0202 11:07:42.789839 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a0ffc923-92b7-4528-963f-bb993ecb20c1-bundle\") pod \"07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97\" (UID: \"a0ffc923-92b7-4528-963f-bb993ecb20c1\") " pod="openstack-operators/07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97" Feb 02 11:07:42 crc kubenswrapper[4838]: I0202 11:07:42.790052 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dg8m6\" (UniqueName: \"kubernetes.io/projected/a0ffc923-92b7-4528-963f-bb993ecb20c1-kube-api-access-dg8m6\") pod \"07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97\" (UID: \"a0ffc923-92b7-4528-963f-bb993ecb20c1\") " pod="openstack-operators/07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97" Feb 02 11:07:42 crc kubenswrapper[4838]: I0202 11:07:42.891377 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a0ffc923-92b7-4528-963f-bb993ecb20c1-bundle\") pod \"07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97\" (UID: \"a0ffc923-92b7-4528-963f-bb993ecb20c1\") " pod="openstack-operators/07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97" Feb 02 11:07:42 crc kubenswrapper[4838]: I0202 11:07:42.891510 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dg8m6\" (UniqueName: \"kubernetes.io/projected/a0ffc923-92b7-4528-963f-bb993ecb20c1-kube-api-access-dg8m6\") pod \"07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97\" (UID: \"a0ffc923-92b7-4528-963f-bb993ecb20c1\") " pod="openstack-operators/07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97" Feb 02 11:07:42 crc kubenswrapper[4838]: I0202 11:07:42.891562 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a0ffc923-92b7-4528-963f-bb993ecb20c1-util\") pod \"07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97\" (UID: \"a0ffc923-92b7-4528-963f-bb993ecb20c1\") " pod="openstack-operators/07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97" Feb 02 11:07:42 crc kubenswrapper[4838]: I0202 11:07:42.892119 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/a0ffc923-92b7-4528-963f-bb993ecb20c1-util\") pod \"07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97\" (UID: \"a0ffc923-92b7-4528-963f-bb993ecb20c1\") " pod="openstack-operators/07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97" Feb 02 11:07:42 crc kubenswrapper[4838]: I0202 11:07:42.892144 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a0ffc923-92b7-4528-963f-bb993ecb20c1-bundle\") pod \"07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97\" (UID: \"a0ffc923-92b7-4528-963f-bb993ecb20c1\") " pod="openstack-operators/07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97" Feb 02 11:07:42 crc kubenswrapper[4838]: I0202 11:07:42.914835 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dg8m6\" (UniqueName: \"kubernetes.io/projected/a0ffc923-92b7-4528-963f-bb993ecb20c1-kube-api-access-dg8m6\") pod \"07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97\" (UID: \"a0ffc923-92b7-4528-963f-bb993ecb20c1\") " pod="openstack-operators/07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97" Feb 02 11:07:43 crc kubenswrapper[4838]: I0202 11:07:43.004323 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97" Feb 02 11:07:43 crc kubenswrapper[4838]: I0202 11:07:43.404866 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97"] Feb 02 11:07:43 crc kubenswrapper[4838]: I0202 11:07:43.452243 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97" event={"ID":"a0ffc923-92b7-4528-963f-bb993ecb20c1","Type":"ContainerStarted","Data":"0d85a2bdd52e9ddb07f83157982770db93d023774339453360873c2d39d8c26e"} Feb 02 11:07:44 crc kubenswrapper[4838]: I0202 11:07:44.462119 4838 generic.go:334] "Generic (PLEG): container finished" podID="a0ffc923-92b7-4528-963f-bb993ecb20c1" containerID="e80cba7406a8e78208da581f9662302960edcbe4c38cf94c0683b8347b07be47" exitCode=0 Feb 02 11:07:44 crc kubenswrapper[4838]: I0202 11:07:44.462170 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97" event={"ID":"a0ffc923-92b7-4528-963f-bb993ecb20c1","Type":"ContainerDied","Data":"e80cba7406a8e78208da581f9662302960edcbe4c38cf94c0683b8347b07be47"} Feb 02 11:07:45 crc kubenswrapper[4838]: I0202 11:07:45.470468 4838 generic.go:334] "Generic (PLEG): container finished" podID="a0ffc923-92b7-4528-963f-bb993ecb20c1" containerID="93a7ce264fd55dbd65a419f06dc8241aa4e7ed7da9a70d3bbc158ce3d2ac2cd2" exitCode=0 Feb 02 11:07:45 crc kubenswrapper[4838]: I0202 11:07:45.470505 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97" event={"ID":"a0ffc923-92b7-4528-963f-bb993ecb20c1","Type":"ContainerDied","Data":"93a7ce264fd55dbd65a419f06dc8241aa4e7ed7da9a70d3bbc158ce3d2ac2cd2"} Feb 02 11:07:46 crc kubenswrapper[4838]: I0202 11:07:46.490541 4838 generic.go:334] "Generic (PLEG): container finished" podID="a0ffc923-92b7-4528-963f-bb993ecb20c1" containerID="78750072f1f420298834428a6bcf85bcd0c7afbbf17f88f8b8230032b39e8755" exitCode=0 Feb 02 11:07:46 crc kubenswrapper[4838]: I0202 11:07:46.490711 4838 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97" event={"ID":"a0ffc923-92b7-4528-963f-bb993ecb20c1","Type":"ContainerDied","Data":"78750072f1f420298834428a6bcf85bcd0c7afbbf17f88f8b8230032b39e8755"} Feb 02 11:07:47 crc kubenswrapper[4838]: I0202 11:07:47.779677 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97" Feb 02 11:07:47 crc kubenswrapper[4838]: I0202 11:07:47.875478 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a0ffc923-92b7-4528-963f-bb993ecb20c1-util\") pod \"a0ffc923-92b7-4528-963f-bb993ecb20c1\" (UID: \"a0ffc923-92b7-4528-963f-bb993ecb20c1\") " Feb 02 11:07:47 crc kubenswrapper[4838]: I0202 11:07:47.875749 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dg8m6\" (UniqueName: \"kubernetes.io/projected/a0ffc923-92b7-4528-963f-bb993ecb20c1-kube-api-access-dg8m6\") pod \"a0ffc923-92b7-4528-963f-bb993ecb20c1\" (UID: \"a0ffc923-92b7-4528-963f-bb993ecb20c1\") " Feb 02 11:07:47 crc kubenswrapper[4838]: I0202 11:07:47.875785 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a0ffc923-92b7-4528-963f-bb993ecb20c1-bundle\") pod \"a0ffc923-92b7-4528-963f-bb993ecb20c1\" (UID: \"a0ffc923-92b7-4528-963f-bb993ecb20c1\") " Feb 02 11:07:47 crc kubenswrapper[4838]: I0202 11:07:47.877006 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0ffc923-92b7-4528-963f-bb993ecb20c1-bundle" (OuterVolumeSpecName: "bundle") pod "a0ffc923-92b7-4528-963f-bb993ecb20c1" (UID: "a0ffc923-92b7-4528-963f-bb993ecb20c1"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:07:47 crc kubenswrapper[4838]: I0202 11:07:47.882841 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0ffc923-92b7-4528-963f-bb993ecb20c1-kube-api-access-dg8m6" (OuterVolumeSpecName: "kube-api-access-dg8m6") pod "a0ffc923-92b7-4528-963f-bb993ecb20c1" (UID: "a0ffc923-92b7-4528-963f-bb993ecb20c1"). InnerVolumeSpecName "kube-api-access-dg8m6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:07:47 crc kubenswrapper[4838]: I0202 11:07:47.895483 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0ffc923-92b7-4528-963f-bb993ecb20c1-util" (OuterVolumeSpecName: "util") pod "a0ffc923-92b7-4528-963f-bb993ecb20c1" (UID: "a0ffc923-92b7-4528-963f-bb993ecb20c1"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:07:47 crc kubenswrapper[4838]: I0202 11:07:47.977817 4838 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a0ffc923-92b7-4528-963f-bb993ecb20c1-util\") on node \"crc\" DevicePath \"\"" Feb 02 11:07:47 crc kubenswrapper[4838]: I0202 11:07:47.977870 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dg8m6\" (UniqueName: \"kubernetes.io/projected/a0ffc923-92b7-4528-963f-bb993ecb20c1-kube-api-access-dg8m6\") on node \"crc\" DevicePath \"\"" Feb 02 11:07:47 crc kubenswrapper[4838]: I0202 11:07:47.977885 4838 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a0ffc923-92b7-4528-963f-bb993ecb20c1-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:07:48 crc kubenswrapper[4838]: I0202 11:07:48.512160 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97" Feb 02 11:07:48 crc kubenswrapper[4838]: I0202 11:07:48.517117 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97" event={"ID":"a0ffc923-92b7-4528-963f-bb993ecb20c1","Type":"ContainerDied","Data":"0d85a2bdd52e9ddb07f83157982770db93d023774339453360873c2d39d8c26e"} Feb 02 11:07:48 crc kubenswrapper[4838]: I0202 11:07:48.517181 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d85a2bdd52e9ddb07f83157982770db93d023774339453360873c2d39d8c26e" Feb 02 11:07:55 crc kubenswrapper[4838]: I0202 11:07:55.194759 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-init-6d5fdc6cdc-dvqjc"] Feb 02 11:07:55 crc kubenswrapper[4838]: E0202 11:07:55.195466 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0ffc923-92b7-4528-963f-bb993ecb20c1" containerName="pull" Feb 02 11:07:55 crc kubenswrapper[4838]: I0202 11:07:55.195479 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0ffc923-92b7-4528-963f-bb993ecb20c1" containerName="pull" Feb 02 11:07:55 crc kubenswrapper[4838]: E0202 11:07:55.195488 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0ffc923-92b7-4528-963f-bb993ecb20c1" containerName="extract" Feb 02 11:07:55 crc kubenswrapper[4838]: I0202 11:07:55.195494 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0ffc923-92b7-4528-963f-bb993ecb20c1" containerName="extract" Feb 02 11:07:55 crc kubenswrapper[4838]: E0202 11:07:55.195513 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0ffc923-92b7-4528-963f-bb993ecb20c1" containerName="util" Feb 02 11:07:55 crc kubenswrapper[4838]: I0202 11:07:55.195520 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0ffc923-92b7-4528-963f-bb993ecb20c1" containerName="util" Feb 02 11:07:55 crc kubenswrapper[4838]: I0202 11:07:55.195639 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0ffc923-92b7-4528-963f-bb993ecb20c1" containerName="extract" Feb 02 11:07:55 crc kubenswrapper[4838]: I0202 11:07:55.196033 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-6d5fdc6cdc-dvqjc" Feb 02 11:07:55 crc kubenswrapper[4838]: I0202 11:07:55.198588 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-init-dockercfg-n5mhx" Feb 02 11:07:55 crc kubenswrapper[4838]: I0202 11:07:55.218601 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-6d5fdc6cdc-dvqjc"] Feb 02 11:07:55 crc kubenswrapper[4838]: I0202 11:07:55.274696 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5j8t\" (UniqueName: \"kubernetes.io/projected/228bb6dc-ac19-4dd3-aaa7-265cc00de1c9-kube-api-access-q5j8t\") pod \"openstack-operator-controller-init-6d5fdc6cdc-dvqjc\" (UID: \"228bb6dc-ac19-4dd3-aaa7-265cc00de1c9\") " pod="openstack-operators/openstack-operator-controller-init-6d5fdc6cdc-dvqjc" Feb 02 11:07:55 crc kubenswrapper[4838]: I0202 11:07:55.375824 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5j8t\" (UniqueName: \"kubernetes.io/projected/228bb6dc-ac19-4dd3-aaa7-265cc00de1c9-kube-api-access-q5j8t\") pod \"openstack-operator-controller-init-6d5fdc6cdc-dvqjc\" (UID: \"228bb6dc-ac19-4dd3-aaa7-265cc00de1c9\") " pod="openstack-operators/openstack-operator-controller-init-6d5fdc6cdc-dvqjc" Feb 02 11:07:55 crc kubenswrapper[4838]: I0202 11:07:55.407694 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5j8t\" (UniqueName: \"kubernetes.io/projected/228bb6dc-ac19-4dd3-aaa7-265cc00de1c9-kube-api-access-q5j8t\") pod \"openstack-operator-controller-init-6d5fdc6cdc-dvqjc\" (UID: \"228bb6dc-ac19-4dd3-aaa7-265cc00de1c9\") " pod="openstack-operators/openstack-operator-controller-init-6d5fdc6cdc-dvqjc" Feb 02 11:07:55 crc kubenswrapper[4838]: I0202 11:07:55.511840 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-6d5fdc6cdc-dvqjc" Feb 02 11:07:55 crc kubenswrapper[4838]: I0202 11:07:55.740797 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-6d5fdc6cdc-dvqjc"] Feb 02 11:07:56 crc kubenswrapper[4838]: I0202 11:07:56.561928 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-6d5fdc6cdc-dvqjc" event={"ID":"228bb6dc-ac19-4dd3-aaa7-265cc00de1c9","Type":"ContainerStarted","Data":"d9e1300f331f5ab1cad19bc089f56aafd86cbeab2bf5202b77d4765ae07c7f73"} Feb 02 11:08:01 crc kubenswrapper[4838]: I0202 11:08:01.596261 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-6d5fdc6cdc-dvqjc" event={"ID":"228bb6dc-ac19-4dd3-aaa7-265cc00de1c9","Type":"ContainerStarted","Data":"70bd41faf2e9a4905a3141cb04354f3cbd731ca5c753df0bf0003230782b9070"} Feb 02 11:08:01 crc kubenswrapper[4838]: I0202 11:08:01.596793 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-init-6d5fdc6cdc-dvqjc" Feb 02 11:08:01 crc kubenswrapper[4838]: I0202 11:08:01.630684 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-init-6d5fdc6cdc-dvqjc" podStartSLOduration=1.141223117 podStartE2EDuration="6.630669562s" podCreationTimestamp="2026-02-02 11:07:55 +0000 UTC" firstStartedPulling="2026-02-02 11:07:55.754279513 +0000 UTC m=+870.091380541" lastFinishedPulling="2026-02-02 11:08:01.243725958 +0000 UTC m=+875.580826986" observedRunningTime="2026-02-02 11:08:01.621331507 +0000 UTC m=+875.958432535" watchObservedRunningTime="2026-02-02 11:08:01.630669562 +0000 UTC m=+875.967770590" Feb 02 11:08:15 crc kubenswrapper[4838]: I0202 11:08:15.516046 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-init-6d5fdc6cdc-dvqjc" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.663937 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-db2x5"] Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.665462 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-db2x5" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.667377 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-msd9x" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.670387 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8d874c8fc-hm4jh"] Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.671265 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-hm4jh" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.677995 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8d874c8fc-hm4jh"] Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.680009 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-j742p" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.689276 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-db2x5"] Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.695547 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-6d9697b7f4-rs64q"] Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.696561 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-rs64q" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.702277 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-hh9w5" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.741457 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2zx9\" (UniqueName: \"kubernetes.io/projected/13ab41db-f38e-4980-89f9-361236526dfa-kube-api-access-w2zx9\") pod \"cinder-operator-controller-manager-8d874c8fc-hm4jh\" (UID: \"13ab41db-f38e-4980-89f9-361236526dfa\") " pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-hm4jh" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.741691 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-6d9697b7f4-rs64q"] Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.741735 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xk2m\" (UniqueName: \"kubernetes.io/projected/e358aab4-cbb0-4522-8740-6646b7fdcabd-kube-api-access-2xk2m\") pod \"designate-operator-controller-manager-6d9697b7f4-rs64q\" (UID: \"e358aab4-cbb0-4522-8740-6646b7fdcabd\") " pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-rs64q" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.741807 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bc7n2\" (UniqueName: \"kubernetes.io/projected/cb4f687b-4b19-447b-beb4-1646c2a40800-kube-api-access-bc7n2\") pod \"barbican-operator-controller-manager-7b6c4d8c5f-db2x5\" (UID: \"cb4f687b-4b19-447b-beb4-1646c2a40800\") " pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-db2x5" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.755199 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-8886f4c47-xftmv"] Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.756184 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-xftmv" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.766055 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-svxq9" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.776394 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-8886f4c47-xftmv"] Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.788415 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-69d6db494d-qrf72"] Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.789339 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-qrf72" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.796976 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-lhmq6" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.817640 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-69d6db494d-qrf72"] Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.828667 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-2kqdq"] Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.829596 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-2kqdq" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.832301 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-26gps" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.842916 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2zx9\" (UniqueName: \"kubernetes.io/projected/13ab41db-f38e-4980-89f9-361236526dfa-kube-api-access-w2zx9\") pod \"cinder-operator-controller-manager-8d874c8fc-hm4jh\" (UID: \"13ab41db-f38e-4980-89f9-361236526dfa\") " pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-hm4jh" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.842981 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8drm5\" (UniqueName: \"kubernetes.io/projected/ccd2dd8e-7b67-4b94-9b9a-b76fab87903c-kube-api-access-8drm5\") pod \"horizon-operator-controller-manager-5fb775575f-2kqdq\" (UID: \"ccd2dd8e-7b67-4b94-9b9a-b76fab87903c\") " pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-2kqdq" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.843019 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xk2m\" (UniqueName: \"kubernetes.io/projected/e358aab4-cbb0-4522-8740-6646b7fdcabd-kube-api-access-2xk2m\") pod \"designate-operator-controller-manager-6d9697b7f4-rs64q\" (UID: \"e358aab4-cbb0-4522-8740-6646b7fdcabd\") " pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-rs64q" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.843053 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bc7n2\" (UniqueName: 
\"kubernetes.io/projected/cb4f687b-4b19-447b-beb4-1646c2a40800-kube-api-access-bc7n2\") pod \"barbican-operator-controller-manager-7b6c4d8c5f-db2x5\" (UID: \"cb4f687b-4b19-447b-beb4-1646c2a40800\") " pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-db2x5" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.843083 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9kxf\" (UniqueName: \"kubernetes.io/projected/084b46d2-88a9-42e4-83b2-dbccf264aafe-kube-api-access-z9kxf\") pod \"glance-operator-controller-manager-8886f4c47-xftmv\" (UID: \"084b46d2-88a9-42e4-83b2-dbccf264aafe\") " pod="openstack-operators/glance-operator-controller-manager-8886f4c47-xftmv" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.843139 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cn54l\" (UniqueName: \"kubernetes.io/projected/9cfe65eb-c657-4f96-b48f-1c9831fd75ba-kube-api-access-cn54l\") pod \"heat-operator-controller-manager-69d6db494d-qrf72\" (UID: \"9cfe65eb-c657-4f96-b48f-1c9831fd75ba\") " pod="openstack-operators/heat-operator-controller-manager-69d6db494d-qrf72" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.843846 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-2kqdq"] Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.867370 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bc7n2\" (UniqueName: \"kubernetes.io/projected/cb4f687b-4b19-447b-beb4-1646c2a40800-kube-api-access-bc7n2\") pod \"barbican-operator-controller-manager-7b6c4d8c5f-db2x5\" (UID: \"cb4f687b-4b19-447b-beb4-1646c2a40800\") " pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-db2x5" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.873857 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-thn2f"] Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.874530 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-79955696d6-thn2f" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.875109 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xk2m\" (UniqueName: \"kubernetes.io/projected/e358aab4-cbb0-4522-8740-6646b7fdcabd-kube-api-access-2xk2m\") pod \"designate-operator-controller-manager-6d9697b7f4-rs64q\" (UID: \"e358aab4-cbb0-4522-8740-6646b7fdcabd\") " pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-rs64q" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.875511 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2zx9\" (UniqueName: \"kubernetes.io/projected/13ab41db-f38e-4980-89f9-361236526dfa-kube-api-access-w2zx9\") pod \"cinder-operator-controller-manager-8d874c8fc-hm4jh\" (UID: \"13ab41db-f38e-4980-89f9-361236526dfa\") " pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-hm4jh" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.882368 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-sxbjw" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.882648 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.949409 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-thn2f"] Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.950186 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cn54l\" (UniqueName: \"kubernetes.io/projected/9cfe65eb-c657-4f96-b48f-1c9831fd75ba-kube-api-access-cn54l\") pod \"heat-operator-controller-manager-69d6db494d-qrf72\" (UID: \"9cfe65eb-c657-4f96-b48f-1c9831fd75ba\") " pod="openstack-operators/heat-operator-controller-manager-69d6db494d-qrf72" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.950278 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8drm5\" (UniqueName: \"kubernetes.io/projected/ccd2dd8e-7b67-4b94-9b9a-b76fab87903c-kube-api-access-8drm5\") pod \"horizon-operator-controller-manager-5fb775575f-2kqdq\" (UID: \"ccd2dd8e-7b67-4b94-9b9a-b76fab87903c\") " pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-2kqdq" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.950329 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9kxf\" (UniqueName: \"kubernetes.io/projected/084b46d2-88a9-42e4-83b2-dbccf264aafe-kube-api-access-z9kxf\") pod \"glance-operator-controller-manager-8886f4c47-xftmv\" (UID: \"084b46d2-88a9-42e4-83b2-dbccf264aafe\") " pod="openstack-operators/glance-operator-controller-manager-8886f4c47-xftmv" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.958591 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7c6b8858cc-lk5ts"] Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.960032 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-7c6b8858cc-lk5ts" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.966704 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-84f48565d4-tn9zr"] Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.968094 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-tn9zr" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.969718 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-bxq9w" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.970087 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-r7snj" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.976468 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cn54l\" (UniqueName: \"kubernetes.io/projected/9cfe65eb-c657-4f96-b48f-1c9831fd75ba-kube-api-access-cn54l\") pod \"heat-operator-controller-manager-69d6db494d-qrf72\" (UID: \"9cfe65eb-c657-4f96-b48f-1c9831fd75ba\") " pod="openstack-operators/heat-operator-controller-manager-69d6db494d-qrf72" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.985752 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-84f48565d4-tn9zr"] Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.988066 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9kxf\" (UniqueName: \"kubernetes.io/projected/084b46d2-88a9-42e4-83b2-dbccf264aafe-kube-api-access-z9kxf\") pod \"glance-operator-controller-manager-8886f4c47-xftmv\" (UID: \"084b46d2-88a9-42e4-83b2-dbccf264aafe\") " pod="openstack-operators/glance-operator-controller-manager-8886f4c47-xftmv" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.993703 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7dd968899f-5s687"] Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.994742 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-5s687" Feb 02 11:08:35 crc kubenswrapper[4838]: I0202 11:08:35.995338 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8drm5\" (UniqueName: \"kubernetes.io/projected/ccd2dd8e-7b67-4b94-9b9a-b76fab87903c-kube-api-access-8drm5\") pod \"horizon-operator-controller-manager-5fb775575f-2kqdq\" (UID: \"ccd2dd8e-7b67-4b94-9b9a-b76fab87903c\") " pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-2kqdq" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.000532 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-jwdj5" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.014524 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-db2x5" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.025137 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7c6b8858cc-lk5ts"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.026606 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-hm4jh" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.034860 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7dd968899f-5s687"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.044682 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf948998-q6xd2"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.045528 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-q6xd2" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.047646 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-r5lx9" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.047811 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-585dbc889-52b6h"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.048581 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-52b6h" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.049163 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-rs64q" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.050515 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf948998-q6xd2"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.051564 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-48hb9" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.051813 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9b8h\" (UniqueName: \"kubernetes.io/projected/79dd465e-2e36-423e-af5b-f41d715c0297-kube-api-access-z9b8h\") pod \"ironic-operator-controller-manager-7c6b8858cc-lk5ts\" (UID: \"79dd465e-2e36-423e-af5b-f41d715c0297\") " pod="openstack-operators/ironic-operator-controller-manager-7c6b8858cc-lk5ts" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.051899 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5e0647d6-93ed-40f1-a522-f5ecf769dd14-cert\") pod \"infra-operator-controller-manager-79955696d6-thn2f\" (UID: \"5e0647d6-93ed-40f1-a522-f5ecf769dd14\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-thn2f" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.052084 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kg4hm\" (UniqueName: \"kubernetes.io/projected/4b7d42b0-25f5-40d4-8deb-34841b6c8c92-kube-api-access-kg4hm\") pod \"keystone-operator-controller-manager-84f48565d4-tn9zr\" (UID: \"4b7d42b0-25f5-40d4-8deb-34841b6c8c92\") " pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-tn9zr" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.052186 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ffdh\" (UniqueName: \"kubernetes.io/projected/5e0647d6-93ed-40f1-a522-f5ecf769dd14-kube-api-access-6ffdh\") pod \"infra-operator-controller-manager-79955696d6-thn2f\" (UID: \"5e0647d6-93ed-40f1-a522-f5ecf769dd14\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-thn2f" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.083698 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-xftmv" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.084678 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-55bff696bd-pms7g"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.085439 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-pms7g" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.088780 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-mzd99" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.094679 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-55bff696bd-pms7g"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.095972 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6687f8d877-hp6js"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.097470 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-hp6js" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.100672 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-585dbc889-52b6h"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.100916 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-z4567" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.104995 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6687f8d877-hp6js"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.109450 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-lbjxl"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.110466 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-lbjxl" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.110690 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-qrf72" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.113010 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-rsms8" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.120186 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.121047 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.122101 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-26qpp" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.131113 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-lbjxl"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.136099 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.137974 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-kbj4l"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.140051 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kbj4l" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.142542 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-cv4f4" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.156094 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-2kqdq" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.157172 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ffdh\" (UniqueName: \"kubernetes.io/projected/5e0647d6-93ed-40f1-a522-f5ecf769dd14-kube-api-access-6ffdh\") pod \"infra-operator-controller-manager-79955696d6-thn2f\" (UID: \"5e0647d6-93ed-40f1-a522-f5ecf769dd14\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-thn2f" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.157202 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9b8h\" (UniqueName: \"kubernetes.io/projected/79dd465e-2e36-423e-af5b-f41d715c0297-kube-api-access-z9b8h\") pod \"ironic-operator-controller-manager-7c6b8858cc-lk5ts\" (UID: \"79dd465e-2e36-423e-af5b-f41d715c0297\") " pod="openstack-operators/ironic-operator-controller-manager-7c6b8858cc-lk5ts" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.157229 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5e0647d6-93ed-40f1-a522-f5ecf769dd14-cert\") pod \"infra-operator-controller-manager-79955696d6-thn2f\" (UID: \"5e0647d6-93ed-40f1-a522-f5ecf769dd14\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-thn2f" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.157253 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jhhp\" (UniqueName: \"kubernetes.io/projected/53860709-50fb-44d9-910b-d4142608d8d8-kube-api-access-7jhhp\") pod \"neutron-operator-controller-manager-585dbc889-52b6h\" (UID: \"53860709-50fb-44d9-910b-d4142608d8d8\") " pod="openstack-operators/neutron-operator-controller-manager-585dbc889-52b6h" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.157294 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-n6qnr\" (UniqueName: \"kubernetes.io/projected/8291636d-bcda-4171-825a-f0f3c73b1320-kube-api-access-n6qnr\") pod \"mariadb-operator-controller-manager-67bf948998-q6xd2\" (UID: \"8291636d-bcda-4171-825a-f0f3c73b1320\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-q6xd2" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.157311 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdkw2\" (UniqueName: \"kubernetes.io/projected/75858b44-9a09-43f3-8de5-8ae999ae2657-kube-api-access-vdkw2\") pod \"manila-operator-controller-manager-7dd968899f-5s687\" (UID: \"75858b44-9a09-43f3-8de5-8ae999ae2657\") " pod="openstack-operators/manila-operator-controller-manager-7dd968899f-5s687" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.157343 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kg4hm\" (UniqueName: \"kubernetes.io/projected/4b7d42b0-25f5-40d4-8deb-34841b6c8c92-kube-api-access-kg4hm\") pod \"keystone-operator-controller-manager-84f48565d4-tn9zr\" (UID: \"4b7d42b0-25f5-40d4-8deb-34841b6c8c92\") " pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-tn9zr" Feb 02 11:08:36 crc kubenswrapper[4838]: E0202 11:08:36.157843 4838 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 02 11:08:36 crc kubenswrapper[4838]: E0202 11:08:36.164900 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5e0647d6-93ed-40f1-a522-f5ecf769dd14-cert podName:5e0647d6-93ed-40f1-a522-f5ecf769dd14 nodeName:}" failed. No retries permitted until 2026-02-02 11:08:36.664871604 +0000 UTC m=+911.001972632 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/5e0647d6-93ed-40f1-a522-f5ecf769dd14-cert") pod "infra-operator-controller-manager-79955696d6-thn2f" (UID: "5e0647d6-93ed-40f1-a522-f5ecf769dd14") : secret "infra-operator-webhook-server-cert" not found Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.170925 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-68fc8c869-8l8mb"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.171772 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-8l8mb" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.175683 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-2hgst" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.187641 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-kbj4l"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.188230 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ffdh\" (UniqueName: \"kubernetes.io/projected/5e0647d6-93ed-40f1-a522-f5ecf769dd14-kube-api-access-6ffdh\") pod \"infra-operator-controller-manager-79955696d6-thn2f\" (UID: \"5e0647d6-93ed-40f1-a522-f5ecf769dd14\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-thn2f" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.188776 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kg4hm\" (UniqueName: \"kubernetes.io/projected/4b7d42b0-25f5-40d4-8deb-34841b6c8c92-kube-api-access-kg4hm\") pod \"keystone-operator-controller-manager-84f48565d4-tn9zr\" (UID: \"4b7d42b0-25f5-40d4-8deb-34841b6c8c92\") " pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-tn9zr" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.189204 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9b8h\" (UniqueName: \"kubernetes.io/projected/79dd465e-2e36-423e-af5b-f41d715c0297-kube-api-access-z9b8h\") pod \"ironic-operator-controller-manager-7c6b8858cc-lk5ts\" (UID: \"79dd465e-2e36-423e-af5b-f41d715c0297\") " pod="openstack-operators/ironic-operator-controller-manager-7c6b8858cc-lk5ts" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.204064 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.217661 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-68fc8c869-8l8mb"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.261975 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6qnr\" (UniqueName: \"kubernetes.io/projected/8291636d-bcda-4171-825a-f0f3c73b1320-kube-api-access-n6qnr\") pod \"mariadb-operator-controller-manager-67bf948998-q6xd2\" (UID: \"8291636d-bcda-4171-825a-f0f3c73b1320\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-q6xd2" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.262014 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdkw2\" (UniqueName: \"kubernetes.io/projected/75858b44-9a09-43f3-8de5-8ae999ae2657-kube-api-access-vdkw2\") pod \"manila-operator-controller-manager-7dd968899f-5s687\" (UID: \"75858b44-9a09-43f3-8de5-8ae999ae2657\") " pod="openstack-operators/manila-operator-controller-manager-7dd968899f-5s687" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.262195 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7svk\" (UniqueName: \"kubernetes.io/projected/0a8916a2-6c71-4678-9a42-23b82b72f891-kube-api-access-d7svk\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj\" (UID: 
\"0a8916a2-6c71-4678-9a42-23b82b72f891\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.262219 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0a8916a2-6c71-4678-9a42-23b82b72f891-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj\" (UID: \"0a8916a2-6c71-4678-9a42-23b82b72f891\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.262272 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfsq4\" (UniqueName: \"kubernetes.io/projected/e40ca74c-361a-4102-b7de-35464bb8821b-kube-api-access-hfsq4\") pod \"ovn-operator-controller-manager-788c46999f-lbjxl\" (UID: \"e40ca74c-361a-4102-b7de-35464bb8821b\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-lbjxl" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.262289 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nl7rq\" (UniqueName: \"kubernetes.io/projected/7ebd9e27-5249-4c31-86cd-200ec9c3b852-kube-api-access-nl7rq\") pod \"placement-operator-controller-manager-5b964cf4cd-kbj4l\" (UID: \"7ebd9e27-5249-4c31-86cd-200ec9c3b852\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kbj4l" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.262340 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlwx5\" (UniqueName: \"kubernetes.io/projected/d960011d-30b7-4eb4-9e06-1b8b9aa0a114-kube-api-access-hlwx5\") pod \"swift-operator-controller-manager-68fc8c869-8l8mb\" (UID: \"d960011d-30b7-4eb4-9e06-1b8b9aa0a114\") " pod="openstack-operators/swift-operator-controller-manager-68fc8c869-8l8mb" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.262356 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kww6h\" (UniqueName: \"kubernetes.io/projected/a17b67e7-df64-4f12-8e78-c52068d2b1df-kube-api-access-kww6h\") pod \"nova-operator-controller-manager-55bff696bd-pms7g\" (UID: \"a17b67e7-df64-4f12-8e78-c52068d2b1df\") " pod="openstack-operators/nova-operator-controller-manager-55bff696bd-pms7g" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.262373 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jhhp\" (UniqueName: \"kubernetes.io/projected/53860709-50fb-44d9-910b-d4142608d8d8-kube-api-access-7jhhp\") pod \"neutron-operator-controller-manager-585dbc889-52b6h\" (UID: \"53860709-50fb-44d9-910b-d4142608d8d8\") " pod="openstack-operators/neutron-operator-controller-manager-585dbc889-52b6h" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.262397 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rr6js\" (UniqueName: \"kubernetes.io/projected/382292e1-fda7-4ab5-91e7-cf4ade4d6363-kube-api-access-rr6js\") pod \"octavia-operator-controller-manager-6687f8d877-hp6js\" (UID: \"382292e1-fda7-4ab5-91e7-cf4ade4d6363\") " pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-hp6js" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.300107 4838 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-vdkw2\" (UniqueName: \"kubernetes.io/projected/75858b44-9a09-43f3-8de5-8ae999ae2657-kube-api-access-vdkw2\") pod \"manila-operator-controller-manager-7dd968899f-5s687\" (UID: \"75858b44-9a09-43f3-8de5-8ae999ae2657\") " pod="openstack-operators/manila-operator-controller-manager-7dd968899f-5s687" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.304084 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6qnr\" (UniqueName: \"kubernetes.io/projected/8291636d-bcda-4171-825a-f0f3c73b1320-kube-api-access-n6qnr\") pod \"mariadb-operator-controller-manager-67bf948998-q6xd2\" (UID: \"8291636d-bcda-4171-825a-f0f3c73b1320\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-q6xd2" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.304574 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jhhp\" (UniqueName: \"kubernetes.io/projected/53860709-50fb-44d9-910b-d4142608d8d8-kube-api-access-7jhhp\") pod \"neutron-operator-controller-manager-585dbc889-52b6h\" (UID: \"53860709-50fb-44d9-910b-d4142608d8d8\") " pod="openstack-operators/neutron-operator-controller-manager-585dbc889-52b6h" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.317738 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4d7p8"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.318700 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4d7p8" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.320210 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-xcbsp" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.325363 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-7c6b8858cc-lk5ts" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.332779 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4d7p8"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.364381 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rr6js\" (UniqueName: \"kubernetes.io/projected/382292e1-fda7-4ab5-91e7-cf4ade4d6363-kube-api-access-rr6js\") pod \"octavia-operator-controller-manager-6687f8d877-hp6js\" (UID: \"382292e1-fda7-4ab5-91e7-cf4ade4d6363\") " pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-hp6js" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.364446 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7svk\" (UniqueName: \"kubernetes.io/projected/0a8916a2-6c71-4678-9a42-23b82b72f891-kube-api-access-d7svk\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj\" (UID: \"0a8916a2-6c71-4678-9a42-23b82b72f891\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.364484 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0a8916a2-6c71-4678-9a42-23b82b72f891-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj\" (UID: \"0a8916a2-6c71-4678-9a42-23b82b72f891\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.364523 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfsq4\" (UniqueName: \"kubernetes.io/projected/e40ca74c-361a-4102-b7de-35464bb8821b-kube-api-access-hfsq4\") pod \"ovn-operator-controller-manager-788c46999f-lbjxl\" (UID: \"e40ca74c-361a-4102-b7de-35464bb8821b\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-lbjxl" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.364540 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nl7rq\" (UniqueName: \"kubernetes.io/projected/7ebd9e27-5249-4c31-86cd-200ec9c3b852-kube-api-access-nl7rq\") pod \"placement-operator-controller-manager-5b964cf4cd-kbj4l\" (UID: \"7ebd9e27-5249-4c31-86cd-200ec9c3b852\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kbj4l" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.364578 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlwx5\" (UniqueName: \"kubernetes.io/projected/d960011d-30b7-4eb4-9e06-1b8b9aa0a114-kube-api-access-hlwx5\") pod \"swift-operator-controller-manager-68fc8c869-8l8mb\" (UID: \"d960011d-30b7-4eb4-9e06-1b8b9aa0a114\") " pod="openstack-operators/swift-operator-controller-manager-68fc8c869-8l8mb" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.364595 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kww6h\" (UniqueName: \"kubernetes.io/projected/a17b67e7-df64-4f12-8e78-c52068d2b1df-kube-api-access-kww6h\") pod \"nova-operator-controller-manager-55bff696bd-pms7g\" (UID: \"a17b67e7-df64-4f12-8e78-c52068d2b1df\") " pod="openstack-operators/nova-operator-controller-manager-55bff696bd-pms7g" Feb 02 11:08:36 crc 
Feb 02 11:08:36 crc kubenswrapper[4838]: E0202 11:08:36.365438 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a8916a2-6c71-4678-9a42-23b82b72f891-cert podName:0a8916a2-6c71-4678-9a42-23b82b72f891 nodeName:}" failed. No retries permitted until 2026-02-02 11:08:36.865425417 +0000 UTC m=+911.202526445 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/0a8916a2-6c71-4678-9a42-23b82b72f891-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj" (UID: "0a8916a2-6c71-4678-9a42-23b82b72f891") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.368923 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-tn9zr"
Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.397250 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-5s687"
Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.398409 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfsq4\" (UniqueName: \"kubernetes.io/projected/e40ca74c-361a-4102-b7de-35464bb8821b-kube-api-access-hfsq4\") pod \"ovn-operator-controller-manager-788c46999f-lbjxl\" (UID: \"e40ca74c-361a-4102-b7de-35464bb8821b\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-lbjxl"
Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.401900 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7svk\" (UniqueName: \"kubernetes.io/projected/0a8916a2-6c71-4678-9a42-23b82b72f891-kube-api-access-d7svk\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj\" (UID: \"0a8916a2-6c71-4678-9a42-23b82b72f891\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj"
Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.408268 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlwx5\" (UniqueName: \"kubernetes.io/projected/d960011d-30b7-4eb4-9e06-1b8b9aa0a114-kube-api-access-hlwx5\") pod \"swift-operator-controller-manager-68fc8c869-8l8mb\" (UID: \"d960011d-30b7-4eb4-9e06-1b8b9aa0a114\") " pod="openstack-operators/swift-operator-controller-manager-68fc8c869-8l8mb"
Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.409736 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nl7rq\" (UniqueName: \"kubernetes.io/projected/7ebd9e27-5249-4c31-86cd-200ec9c3b852-kube-api-access-nl7rq\") pod \"placement-operator-controller-manager-5b964cf4cd-kbj4l\" (UID: \"7ebd9e27-5249-4c31-86cd-200ec9c3b852\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kbj4l"
Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.410146 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-q6xd2"
Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.412583 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rr6js\" (UniqueName: \"kubernetes.io/projected/382292e1-fda7-4ab5-91e7-cf4ade4d6363-kube-api-access-rr6js\") pod \"octavia-operator-controller-manager-6687f8d877-hp6js\" (UID: \"382292e1-fda7-4ab5-91e7-cf4ade4d6363\") " pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-hp6js"
Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.414421 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kww6h\" (UniqueName: \"kubernetes.io/projected/a17b67e7-df64-4f12-8e78-c52068d2b1df-kube-api-access-kww6h\") pod \"nova-operator-controller-manager-55bff696bd-pms7g\" (UID: \"a17b67e7-df64-4f12-8e78-c52068d2b1df\") " pod="openstack-operators/nova-operator-controller-manager-55bff696bd-pms7g"
Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.430580 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-52b6h"
Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.455912 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-5xdv4"]
Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.458090 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-5xdv4"
Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.467557 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-9zqv6"
Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.500265 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4b496\" (UniqueName: \"kubernetes.io/projected/5e7863af-65e8-4d89-a434-fac6c13414cc-kube-api-access-4b496\") pod \"telemetry-operator-controller-manager-64b5b76f97-4d7p8\" (UID: \"5e7863af-65e8-4d89-a434-fac6c13414cc\") " pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4d7p8"
Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.517580 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-pms7g"
Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.534926 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-5xdv4"]
Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.539548 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-hp6js"
Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.549309 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-564965969-h6lq5"]
Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.551324 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-564965969-h6lq5"
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-564965969-h6lq5" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.554047 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-whcmb" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.555340 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-564965969-h6lq5"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.567009 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.567979 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.568540 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-lbjxl" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.571251 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-9mzgk" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.571592 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.572512 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.575259 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.582600 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pljzb"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.583705 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pljzb" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.586979 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-lg9cp" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.602375 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4b496\" (UniqueName: \"kubernetes.io/projected/5e7863af-65e8-4d89-a434-fac6c13414cc-kube-api-access-4b496\") pod \"telemetry-operator-controller-manager-64b5b76f97-4d7p8\" (UID: \"5e7863af-65e8-4d89-a434-fac6c13414cc\") " pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4d7p8" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.602463 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mch4f\" (UniqueName: \"kubernetes.io/projected/27825541-2816-4017-bba1-0f6f5946bb3c-kube-api-access-mch4f\") pod \"test-operator-controller-manager-56f8bfcd9f-5xdv4\" (UID: \"27825541-2816-4017-bba1-0f6f5946bb3c\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-5xdv4" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.607672 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pljzb"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.632814 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4b496\" (UniqueName: \"kubernetes.io/projected/5e7863af-65e8-4d89-a434-fac6c13414cc-kube-api-access-4b496\") pod \"telemetry-operator-controller-manager-64b5b76f97-4d7p8\" (UID: \"5e7863af-65e8-4d89-a434-fac6c13414cc\") " pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4d7p8" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.636078 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kbj4l" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.665830 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-db2x5"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.669378 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-8l8mb" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.703294 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-webhook-certs\") pod \"openstack-operator-controller-manager-5dcd749f76-wzqhl\" (UID: \"83e1d0f5-af2b-4c12-abbd-712e18108a24\") " pod="openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.703363 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5e0647d6-93ed-40f1-a522-f5ecf769dd14-cert\") pod \"infra-operator-controller-manager-79955696d6-thn2f\" (UID: \"5e0647d6-93ed-40f1-a522-f5ecf769dd14\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-thn2f" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.703414 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gkbss\" (UniqueName: \"kubernetes.io/projected/a158eb1e-69b8-48ad-8061-a3e503981572-kube-api-access-gkbss\") pod \"rabbitmq-cluster-operator-manager-668c99d594-pljzb\" (UID: \"a158eb1e-69b8-48ad-8061-a3e503981572\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pljzb" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.703449 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-metrics-certs\") pod \"openstack-operator-controller-manager-5dcd749f76-wzqhl\" (UID: \"83e1d0f5-af2b-4c12-abbd-712e18108a24\") " pod="openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.703490 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrf7q\" (UniqueName: \"kubernetes.io/projected/149430e7-7b6d-44d0-a474-944271e7bb5e-kube-api-access-lrf7q\") pod \"watcher-operator-controller-manager-564965969-h6lq5\" (UID: \"149430e7-7b6d-44d0-a474-944271e7bb5e\") " pod="openstack-operators/watcher-operator-controller-manager-564965969-h6lq5" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.703512 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pffbs\" (UniqueName: \"kubernetes.io/projected/83e1d0f5-af2b-4c12-abbd-712e18108a24-kube-api-access-pffbs\") pod \"openstack-operator-controller-manager-5dcd749f76-wzqhl\" (UID: \"83e1d0f5-af2b-4c12-abbd-712e18108a24\") " pod="openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.703529 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mch4f\" (UniqueName: \"kubernetes.io/projected/27825541-2816-4017-bba1-0f6f5946bb3c-kube-api-access-mch4f\") pod \"test-operator-controller-manager-56f8bfcd9f-5xdv4\" (UID: \"27825541-2816-4017-bba1-0f6f5946bb3c\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-5xdv4" Feb 02 11:08:36 crc kubenswrapper[4838]: E0202 11:08:36.704700 4838 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not 
found Feb 02 11:08:36 crc kubenswrapper[4838]: E0202 11:08:36.704740 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5e0647d6-93ed-40f1-a522-f5ecf769dd14-cert podName:5e0647d6-93ed-40f1-a522-f5ecf769dd14 nodeName:}" failed. No retries permitted until 2026-02-02 11:08:37.704726559 +0000 UTC m=+912.041827587 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/5e0647d6-93ed-40f1-a522-f5ecf769dd14-cert") pod "infra-operator-controller-manager-79955696d6-thn2f" (UID: "5e0647d6-93ed-40f1-a522-f5ecf769dd14") : secret "infra-operator-webhook-server-cert" not found Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.711020 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4d7p8" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.726280 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mch4f\" (UniqueName: \"kubernetes.io/projected/27825541-2816-4017-bba1-0f6f5946bb3c-kube-api-access-mch4f\") pod \"test-operator-controller-manager-56f8bfcd9f-5xdv4\" (UID: \"27825541-2816-4017-bba1-0f6f5946bb3c\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-5xdv4" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.790317 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-8886f4c47-xftmv"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.804730 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrf7q\" (UniqueName: \"kubernetes.io/projected/149430e7-7b6d-44d0-a474-944271e7bb5e-kube-api-access-lrf7q\") pod \"watcher-operator-controller-manager-564965969-h6lq5\" (UID: \"149430e7-7b6d-44d0-a474-944271e7bb5e\") " pod="openstack-operators/watcher-operator-controller-manager-564965969-h6lq5" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.804789 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pffbs\" (UniqueName: \"kubernetes.io/projected/83e1d0f5-af2b-4c12-abbd-712e18108a24-kube-api-access-pffbs\") pod \"openstack-operator-controller-manager-5dcd749f76-wzqhl\" (UID: \"83e1d0f5-af2b-4c12-abbd-712e18108a24\") " pod="openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.804844 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-webhook-certs\") pod \"openstack-operator-controller-manager-5dcd749f76-wzqhl\" (UID: \"83e1d0f5-af2b-4c12-abbd-712e18108a24\") " pod="openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.804925 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gkbss\" (UniqueName: \"kubernetes.io/projected/a158eb1e-69b8-48ad-8061-a3e503981572-kube-api-access-gkbss\") pod \"rabbitmq-cluster-operator-manager-668c99d594-pljzb\" (UID: \"a158eb1e-69b8-48ad-8061-a3e503981572\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pljzb" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.804989 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-metrics-certs\") pod \"openstack-operator-controller-manager-5dcd749f76-wzqhl\" (UID: \"83e1d0f5-af2b-4c12-abbd-712e18108a24\") " pod="openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl" Feb 02 11:08:36 crc kubenswrapper[4838]: E0202 11:08:36.805141 4838 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 02 11:08:36 crc kubenswrapper[4838]: E0202 11:08:36.805200 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-metrics-certs podName:83e1d0f5-af2b-4c12-abbd-712e18108a24 nodeName:}" failed. No retries permitted until 2026-02-02 11:08:37.3051805 +0000 UTC m=+911.642281528 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-metrics-certs") pod "openstack-operator-controller-manager-5dcd749f76-wzqhl" (UID: "83e1d0f5-af2b-4c12-abbd-712e18108a24") : secret "metrics-server-cert" not found Feb 02 11:08:36 crc kubenswrapper[4838]: E0202 11:08:36.805924 4838 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 02 11:08:36 crc kubenswrapper[4838]: E0202 11:08:36.805965 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-webhook-certs podName:83e1d0f5-af2b-4c12-abbd-712e18108a24 nodeName:}" failed. No retries permitted until 2026-02-02 11:08:37.30595286 +0000 UTC m=+911.643053888 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-webhook-certs") pod "openstack-operator-controller-manager-5dcd749f76-wzqhl" (UID: "83e1d0f5-af2b-4c12-abbd-712e18108a24") : secret "webhook-server-cert" not found Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.809129 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8d874c8fc-hm4jh"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.823589 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pffbs\" (UniqueName: \"kubernetes.io/projected/83e1d0f5-af2b-4c12-abbd-712e18108a24-kube-api-access-pffbs\") pod \"openstack-operator-controller-manager-5dcd749f76-wzqhl\" (UID: \"83e1d0f5-af2b-4c12-abbd-712e18108a24\") " pod="openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.832278 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-5xdv4" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.833088 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-6d9697b7f4-rs64q"] Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.836204 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrf7q\" (UniqueName: \"kubernetes.io/projected/149430e7-7b6d-44d0-a474-944271e7bb5e-kube-api-access-lrf7q\") pod \"watcher-operator-controller-manager-564965969-h6lq5\" (UID: \"149430e7-7b6d-44d0-a474-944271e7bb5e\") " pod="openstack-operators/watcher-operator-controller-manager-564965969-h6lq5" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.840663 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gkbss\" (UniqueName: \"kubernetes.io/projected/a158eb1e-69b8-48ad-8061-a3e503981572-kube-api-access-gkbss\") pod \"rabbitmq-cluster-operator-manager-668c99d594-pljzb\" (UID: \"a158eb1e-69b8-48ad-8061-a3e503981572\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pljzb" Feb 02 11:08:36 crc kubenswrapper[4838]: W0202 11:08:36.871539 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod13ab41db_f38e_4980_89f9_361236526dfa.slice/crio-ec920bad3024bf04a84c75a7aa2be4ab49e41e75cdc7ade0bd261b0e75e674e6 WatchSource:0}: Error finding container ec920bad3024bf04a84c75a7aa2be4ab49e41e75cdc7ade0bd261b0e75e674e6: Status 404 returned error can't find the container with id ec920bad3024bf04a84c75a7aa2be4ab49e41e75cdc7ade0bd261b0e75e674e6 Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.888977 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-rs64q" event={"ID":"e358aab4-cbb0-4522-8740-6646b7fdcabd","Type":"ContainerStarted","Data":"f9fdbc0110659b3558c6f4790554cf7af485582dcd7ff9dc2ada87ce75fa69b5"} Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.893500 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-db2x5" event={"ID":"cb4f687b-4b19-447b-beb4-1646c2a40800","Type":"ContainerStarted","Data":"86e2e76c3a572fcc21f160055335cf086f67e37b208241d65cb2e3248cbd9cb3"} Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.903378 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-xftmv" event={"ID":"084b46d2-88a9-42e4-83b2-dbccf264aafe","Type":"ContainerStarted","Data":"2c27518807c7cefb00428d47e9c2888d5b41183b73ea1a5626f6bc8e01db524c"} Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.904672 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-564965969-h6lq5" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.905551 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0a8916a2-6c71-4678-9a42-23b82b72f891-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj\" (UID: \"0a8916a2-6c71-4678-9a42-23b82b72f891\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj" Feb 02 11:08:36 crc kubenswrapper[4838]: E0202 11:08:36.905902 4838 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 11:08:36 crc kubenswrapper[4838]: E0202 11:08:36.905955 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a8916a2-6c71-4678-9a42-23b82b72f891-cert podName:0a8916a2-6c71-4678-9a42-23b82b72f891 nodeName:}" failed. No retries permitted until 2026-02-02 11:08:37.905937439 +0000 UTC m=+912.243038467 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/0a8916a2-6c71-4678-9a42-23b82b72f891-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj" (UID: "0a8916a2-6c71-4678-9a42-23b82b72f891") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.908870 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-hm4jh" event={"ID":"13ab41db-f38e-4980-89f9-361236526dfa","Type":"ContainerStarted","Data":"ec920bad3024bf04a84c75a7aa2be4ab49e41e75cdc7ade0bd261b0e75e674e6"} Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.970524 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pljzb" Feb 02 11:08:36 crc kubenswrapper[4838]: I0202 11:08:36.997428 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-69d6db494d-qrf72"] Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.012903 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-2kqdq"] Feb 02 11:08:37 crc kubenswrapper[4838]: W0202 11:08:37.072795 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podccd2dd8e_7b67_4b94_9b9a_b76fab87903c.slice/crio-6818fcf2a157b0c0f9925aeb1742bf94fbfc0281ce3694b2621e53c1aab52c39 WatchSource:0}: Error finding container 6818fcf2a157b0c0f9925aeb1742bf94fbfc0281ce3694b2621e53c1aab52c39: Status 404 returned error can't find the container with id 6818fcf2a157b0c0f9925aeb1742bf94fbfc0281ce3694b2621e53c1aab52c39 Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.119192 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7c6b8858cc-lk5ts"] Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.127649 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-585dbc889-52b6h"] Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.133862 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7dd968899f-5s687"] Feb 02 11:08:37 crc kubenswrapper[4838]: W0202 11:08:37.156632 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod75858b44_9a09_43f3_8de5_8ae999ae2657.slice/crio-bbc30bb368acd78c9c6f4b351eeb064b748c35789ff864b20fc0a32e8ad215e4 WatchSource:0}: Error finding container bbc30bb368acd78c9c6f4b351eeb064b748c35789ff864b20fc0a32e8ad215e4: Status 404 returned error can't find the container with id bbc30bb368acd78c9c6f4b351eeb064b748c35789ff864b20fc0a32e8ad215e4 Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.281239 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-84f48565d4-tn9zr"] Feb 02 11:08:37 crc kubenswrapper[4838]: W0202 11:08:37.287694 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4b7d42b0_25f5_40d4_8deb_34841b6c8c92.slice/crio-8ba855ca410f8728cbb9090aa9b8296d2c86a0e8fcaced5de94b02bebc28dc5e WatchSource:0}: Error finding container 8ba855ca410f8728cbb9090aa9b8296d2c86a0e8fcaced5de94b02bebc28dc5e: Status 404 returned error can't find the container with id 8ba855ca410f8728cbb9090aa9b8296d2c86a0e8fcaced5de94b02bebc28dc5e Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.289060 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-kbj4l"] Feb 02 11:08:37 crc kubenswrapper[4838]: W0202 11:08:37.295281 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7ebd9e27_5249_4c31_86cd_200ec9c3b852.slice/crio-848576a4ca2a5bf7042cf6e76679f6469cfcc4fef344ec18bf7fd8a31e9fc679 WatchSource:0}: Error finding container 848576a4ca2a5bf7042cf6e76679f6469cfcc4fef344ec18bf7fd8a31e9fc679: Status 404 returned error can't find 
Feb 02 11:08:37 crc kubenswrapper[4838]: W0202 11:08:37.304604 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda17b67e7_df64_4f12_8e78_c52068d2b1df.slice/crio-d0cd50575dc4fc6858850446084c7766ff0eb92137dd3a410009eef69d932db2 WatchSource:0}: Error finding container d0cd50575dc4fc6858850446084c7766ff0eb92137dd3a410009eef69d932db2: Status 404 returned error can't find the container with id d0cd50575dc4fc6858850446084c7766ff0eb92137dd3a410009eef69d932db2
Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.314089 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-metrics-certs\") pod \"openstack-operator-controller-manager-5dcd749f76-wzqhl\" (UID: \"83e1d0f5-af2b-4c12-abbd-712e18108a24\") " pod="openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl"
Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.314187 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-webhook-certs\") pod \"openstack-operator-controller-manager-5dcd749f76-wzqhl\" (UID: \"83e1d0f5-af2b-4c12-abbd-712e18108a24\") " pod="openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl"
Feb 02 11:08:37 crc kubenswrapper[4838]: E0202 11:08:37.314309 4838 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Feb 02 11:08:37 crc kubenswrapper[4838]: E0202 11:08:37.314345 4838 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Feb 02 11:08:37 crc kubenswrapper[4838]: E0202 11:08:37.314485 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-metrics-certs podName:83e1d0f5-af2b-4c12-abbd-712e18108a24 nodeName:}" failed. No retries permitted until 2026-02-02 11:08:38.314361667 +0000 UTC m=+912.651462695 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-metrics-certs") pod "openstack-operator-controller-manager-5dcd749f76-wzqhl" (UID: "83e1d0f5-af2b-4c12-abbd-712e18108a24") : secret "metrics-server-cert" not found
Feb 02 11:08:37 crc kubenswrapper[4838]: E0202 11:08:37.314536 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-webhook-certs podName:83e1d0f5-af2b-4c12-abbd-712e18108a24 nodeName:}" failed. No retries permitted until 2026-02-02 11:08:38.314526651 +0000 UTC m=+912.651627679 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-webhook-certs") pod "openstack-operator-controller-manager-5dcd749f76-wzqhl" (UID: "83e1d0f5-af2b-4c12-abbd-712e18108a24") : secret "webhook-server-cert" not found
Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.317164 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-55bff696bd-pms7g"]
Feb 02 11:08:37 crc kubenswrapper[4838]: W0202 11:08:37.326001 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8291636d_bcda_4171_825a_f0f3c73b1320.slice/crio-985173c79df04f68f982a879ed5db3223ee02d5c72af5c6dd9eedafa9febd418 WatchSource:0}: Error finding container 985173c79df04f68f982a879ed5db3223ee02d5c72af5c6dd9eedafa9febd418: Status 404 returned error can't find the container with id 985173c79df04f68f982a879ed5db3223ee02d5c72af5c6dd9eedafa9febd418
Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.327953 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf948998-q6xd2"]
Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.406145 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-lbjxl"]
Feb 02 11:08:37 crc kubenswrapper[4838]: W0202 11:08:37.425802 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode40ca74c_361a_4102_b7de_35464bb8821b.slice/crio-8ea9662f35d2a364c1a0cc0c747c4bc7fe95d7d55b38a262a4ded62f657b728f WatchSource:0}: Error finding container 8ea9662f35d2a364c1a0cc0c747c4bc7fe95d7d55b38a262a4ded62f657b728f: Status 404 returned error can't find the container with id 8ea9662f35d2a364c1a0cc0c747c4bc7fe95d7d55b38a262a4ded62f657b728f
Feb 02 11:08:37 crc kubenswrapper[4838]: E0202 11:08:37.432041 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:ea7b72b648a5bde2eebd804c2a5c1608d448a4892176c1b8d000c1eef4bb92b4,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hfsq4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-788c46999f-lbjxl_openstack-operators(e40ca74c-361a-4102-b7de-35464bb8821b): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Feb 02 11:08:37 crc kubenswrapper[4838]: E0202 11:08:37.433817 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-lbjxl" podUID="e40ca74c-361a-4102-b7de-35464bb8821b"
Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.445793 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6687f8d877-hp6js"]
Feb 02 11:08:37 crc kubenswrapper[4838]: E0202 11:08:37.446136 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:e6f2f361f1dcbb321407a5884951e16ff96e7b88942b10b548f27ad4de14a0be,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rr6js,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-6687f8d877-hp6js_openstack-operators(382292e1-fda7-4ab5-91e7-cf4ade4d6363): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Feb 02 11:08:37 crc kubenswrapper[4838]: E0202 11:08:37.447504 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-hp6js" podUID="382292e1-fda7-4ab5-91e7-cf4ade4d6363"
Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.475231 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4d7p8"]
Feb 02 11:08:37 crc kubenswrapper[4838]: W0202 11:08:37.481091 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5e7863af_65e8_4d89_a434_fac6c13414cc.slice/crio-968b0c43d9cc82a6040b8588372e488463171d16e6fa044434982e9920d99352 WatchSource:0}: Error finding container 968b0c43d9cc82a6040b8588372e488463171d16e6fa044434982e9920d99352: Status 404 returned error can't find the container with id 968b0c43d9cc82a6040b8588372e488463171d16e6fa044434982e9920d99352
Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.482443 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-68fc8c869-8l8mb"]
Feb 02 11:08:37 crc kubenswrapper[4838]: W0202 11:08:37.484039 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd960011d_30b7_4eb4_9e06_1b8b9aa0a114.slice/crio-4254434cce69c2c78b70b5ad3efb9f1942695ac044916600fafc6442e864b122 WatchSource:0}: Error finding container 4254434cce69c2c78b70b5ad3efb9f1942695ac044916600fafc6442e864b122: Status 404 returned error can't find the container with id 4254434cce69c2c78b70b5ad3efb9f1942695ac044916600fafc6442e864b122
Feb 02 11:08:37 crc kubenswrapper[4838]: E0202 11:08:37.484520 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:f9bf288cd0c13912404027a58ea3b90d4092b641e8265adc5c88644ea7fe901a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4b496,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-64b5b76f97-4d7p8_openstack-operators(5e7863af-65e8-4d89-a434-fac6c13414cc): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Feb 02 11:08:37 crc kubenswrapper[4838]: E0202 11:08:37.485877 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4d7p8" podUID="5e7863af-65e8-4d89-a434-fac6c13414cc"
Feb 02 11:08:37 crc kubenswrapper[4838]: E0202 11:08:37.486296 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:42ad717de1b82267d244b016e5491a5b66a5c3deb6b8c2906a379e1296a2c382,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hlwx5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-68fc8c869-8l8mb_openstack-operators(d960011d-30b7-4eb4-9e06-1b8b9aa0a114): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 02 11:08:37 crc kubenswrapper[4838]: E0202 11:08:37.487788 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-8l8mb" podUID="d960011d-30b7-4eb4-9e06-1b8b9aa0a114" Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.581182 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-5xdv4"] Feb 02 11:08:37 crc kubenswrapper[4838]: E0202 11:08:37.581794 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mch4f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-56f8bfcd9f-5xdv4_openstack-operators(27825541-2816-4017-bba1-0f6f5946bb3c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 02 11:08:37 crc kubenswrapper[4838]: E0202 11:08:37.581987 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gkbss,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-pljzb_openstack-operators(a158eb1e-69b8-48ad-8061-a3e503981572): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 02 11:08:37 crc kubenswrapper[4838]: E0202 11:08:37.584053 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pljzb" podUID="a158eb1e-69b8-48ad-8061-a3e503981572" Feb 02 11:08:37 crc kubenswrapper[4838]: E0202 11:08:37.584114 4838 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-5xdv4" podUID="27825541-2816-4017-bba1-0f6f5946bb3c" Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.595097 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-564965969-h6lq5"] Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.599330 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pljzb"] Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.720586 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5e0647d6-93ed-40f1-a522-f5ecf769dd14-cert\") pod \"infra-operator-controller-manager-79955696d6-thn2f\" (UID: \"5e0647d6-93ed-40f1-a522-f5ecf769dd14\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-thn2f" Feb 02 11:08:37 crc kubenswrapper[4838]: E0202 11:08:37.720844 4838 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 02 11:08:37 crc kubenswrapper[4838]: E0202 11:08:37.720956 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5e0647d6-93ed-40f1-a522-f5ecf769dd14-cert podName:5e0647d6-93ed-40f1-a522-f5ecf769dd14 nodeName:}" failed. No retries permitted until 2026-02-02 11:08:39.720914886 +0000 UTC m=+914.058015914 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/5e0647d6-93ed-40f1-a522-f5ecf769dd14-cert") pod "infra-operator-controller-manager-79955696d6-thn2f" (UID: "5e0647d6-93ed-40f1-a522-f5ecf769dd14") : secret "infra-operator-webhook-server-cert" not found Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.933059 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0a8916a2-6c71-4678-9a42-23b82b72f891-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj\" (UID: \"0a8916a2-6c71-4678-9a42-23b82b72f891\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj" Feb 02 11:08:37 crc kubenswrapper[4838]: E0202 11:08:37.933376 4838 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 11:08:37 crc kubenswrapper[4838]: E0202 11:08:37.933449 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a8916a2-6c71-4678-9a42-23b82b72f891-cert podName:0a8916a2-6c71-4678-9a42-23b82b72f891 nodeName:}" failed. No retries permitted until 2026-02-02 11:08:39.933431154 +0000 UTC m=+914.270532182 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/0a8916a2-6c71-4678-9a42-23b82b72f891-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj" (UID: "0a8916a2-6c71-4678-9a42-23b82b72f891") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.950001 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-b5m67"] Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.953090 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-q6xd2" event={"ID":"8291636d-bcda-4171-825a-f0f3c73b1320","Type":"ContainerStarted","Data":"985173c79df04f68f982a879ed5db3223ee02d5c72af5c6dd9eedafa9febd418"} Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.953129 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-52b6h" event={"ID":"53860709-50fb-44d9-910b-d4142608d8d8","Type":"ContainerStarted","Data":"099bd5106ae5b8820c5c18fee7c06740d4a670245815db4d550dbaaa1be724b4"} Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.953273 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b5m67" Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.953757 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-2kqdq" event={"ID":"ccd2dd8e-7b67-4b94-9b9a-b76fab87903c","Type":"ContainerStarted","Data":"6818fcf2a157b0c0f9925aeb1742bf94fbfc0281ce3694b2621e53c1aab52c39"} Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.954046 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b5m67"] Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.954757 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-lbjxl" event={"ID":"e40ca74c-361a-4102-b7de-35464bb8821b","Type":"ContainerStarted","Data":"8ea9662f35d2a364c1a0cc0c747c4bc7fe95d7d55b38a262a4ded62f657b728f"} Feb 02 11:08:37 crc kubenswrapper[4838]: E0202 11:08:37.956108 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:ea7b72b648a5bde2eebd804c2a5c1608d448a4892176c1b8d000c1eef4bb92b4\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-lbjxl" podUID="e40ca74c-361a-4102-b7de-35464bb8821b" Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.958495 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-5s687" event={"ID":"75858b44-9a09-43f3-8de5-8ae999ae2657","Type":"ContainerStarted","Data":"bbc30bb368acd78c9c6f4b351eeb064b748c35789ff864b20fc0a32e8ad215e4"} Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.959575 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-8l8mb" event={"ID":"d960011d-30b7-4eb4-9e06-1b8b9aa0a114","Type":"ContainerStarted","Data":"4254434cce69c2c78b70b5ad3efb9f1942695ac044916600fafc6442e864b122"} Feb 02 11:08:37 crc kubenswrapper[4838]: E0202 11:08:37.960520 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" 
for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:42ad717de1b82267d244b016e5491a5b66a5c3deb6b8c2906a379e1296a2c382\\\"\"" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-8l8mb" podUID="d960011d-30b7-4eb4-9e06-1b8b9aa0a114" Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.961602 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4d7p8" event={"ID":"5e7863af-65e8-4d89-a434-fac6c13414cc","Type":"ContainerStarted","Data":"968b0c43d9cc82a6040b8588372e488463171d16e6fa044434982e9920d99352"} Feb 02 11:08:37 crc kubenswrapper[4838]: E0202 11:08:37.963313 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:f9bf288cd0c13912404027a58ea3b90d4092b641e8265adc5c88644ea7fe901a\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4d7p8" podUID="5e7863af-65e8-4d89-a434-fac6c13414cc" Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.969003 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-5xdv4" event={"ID":"27825541-2816-4017-bba1-0f6f5946bb3c","Type":"ContainerStarted","Data":"3d359b089cef7725f96d900d48898cf5a2d6611c0d053fb55bdfbe617b607785"} Feb 02 11:08:37 crc kubenswrapper[4838]: E0202 11:08:37.970320 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241\\\"\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-5xdv4" podUID="27825541-2816-4017-bba1-0f6f5946bb3c" Feb 02 11:08:37 crc kubenswrapper[4838]: I0202 11:08:37.977868 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-qrf72" event={"ID":"9cfe65eb-c657-4f96-b48f-1c9831fd75ba","Type":"ContainerStarted","Data":"e64c88230b3e4915187f478d9d1687e6b3f2235f7f79d65718aafe5d427498e6"} Feb 02 11:08:38 crc kubenswrapper[4838]: I0202 11:08:38.006390 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kbj4l" event={"ID":"7ebd9e27-5249-4c31-86cd-200ec9c3b852","Type":"ContainerStarted","Data":"848576a4ca2a5bf7042cf6e76679f6469cfcc4fef344ec18bf7fd8a31e9fc679"} Feb 02 11:08:38 crc kubenswrapper[4838]: I0202 11:08:38.028656 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7c6b8858cc-lk5ts" event={"ID":"79dd465e-2e36-423e-af5b-f41d715c0297","Type":"ContainerStarted","Data":"7e9f06ffbb427820ab862b765d2542a873f780c3e90e26dab3a60eb8f275963c"} Feb 02 11:08:38 crc kubenswrapper[4838]: I0202 11:08:38.047294 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-tn9zr" event={"ID":"4b7d42b0-25f5-40d4-8deb-34841b6c8c92","Type":"ContainerStarted","Data":"8ba855ca410f8728cbb9090aa9b8296d2c86a0e8fcaced5de94b02bebc28dc5e"} Feb 02 11:08:38 crc kubenswrapper[4838]: I0202 11:08:38.069350 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-hp6js" 
event={"ID":"382292e1-fda7-4ab5-91e7-cf4ade4d6363","Type":"ContainerStarted","Data":"949d0d6924d9f1108d49b6fb4990bc7cb3328aed95e0404a0327527f6c234af2"} Feb 02 11:08:38 crc kubenswrapper[4838]: E0202 11:08:38.076922 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:e6f2f361f1dcbb321407a5884951e16ff96e7b88942b10b548f27ad4de14a0be\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-hp6js" podUID="382292e1-fda7-4ab5-91e7-cf4ade4d6363" Feb 02 11:08:38 crc kubenswrapper[4838]: I0202 11:08:38.079883 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pljzb" event={"ID":"a158eb1e-69b8-48ad-8061-a3e503981572","Type":"ContainerStarted","Data":"94e4363ab9e9f39f2f2c261b786f9074049d88978dda9966395b8520027a6537"} Feb 02 11:08:38 crc kubenswrapper[4838]: E0202 11:08:38.096066 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pljzb" podUID="a158eb1e-69b8-48ad-8061-a3e503981572" Feb 02 11:08:38 crc kubenswrapper[4838]: I0202 11:08:38.112894 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-pms7g" event={"ID":"a17b67e7-df64-4f12-8e78-c52068d2b1df","Type":"ContainerStarted","Data":"d0cd50575dc4fc6858850446084c7766ff0eb92137dd3a410009eef69d932db2"} Feb 02 11:08:38 crc kubenswrapper[4838]: I0202 11:08:38.124019 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-564965969-h6lq5" event={"ID":"149430e7-7b6d-44d0-a474-944271e7bb5e","Type":"ContainerStarted","Data":"ec08b5f922c5c32da4f75f7f2a7ea19f6fc0b1e70064c4f51b5a94c65d2f937f"} Feb 02 11:08:38 crc kubenswrapper[4838]: I0202 11:08:38.150628 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07a77276-c90a-4b36-bfb6-3da9beccbb2f-utilities\") pod \"redhat-marketplace-b5m67\" (UID: \"07a77276-c90a-4b36-bfb6-3da9beccbb2f\") " pod="openshift-marketplace/redhat-marketplace-b5m67" Feb 02 11:08:38 crc kubenswrapper[4838]: I0202 11:08:38.150695 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07a77276-c90a-4b36-bfb6-3da9beccbb2f-catalog-content\") pod \"redhat-marketplace-b5m67\" (UID: \"07a77276-c90a-4b36-bfb6-3da9beccbb2f\") " pod="openshift-marketplace/redhat-marketplace-b5m67" Feb 02 11:08:38 crc kubenswrapper[4838]: I0202 11:08:38.150769 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4779\" (UniqueName: \"kubernetes.io/projected/07a77276-c90a-4b36-bfb6-3da9beccbb2f-kube-api-access-m4779\") pod \"redhat-marketplace-b5m67\" (UID: \"07a77276-c90a-4b36-bfb6-3da9beccbb2f\") " pod="openshift-marketplace/redhat-marketplace-b5m67" Feb 02 11:08:38 crc kubenswrapper[4838]: I0202 11:08:38.252149 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/07a77276-c90a-4b36-bfb6-3da9beccbb2f-utilities\") pod \"redhat-marketplace-b5m67\" (UID: \"07a77276-c90a-4b36-bfb6-3da9beccbb2f\") " pod="openshift-marketplace/redhat-marketplace-b5m67" Feb 02 11:08:38 crc kubenswrapper[4838]: I0202 11:08:38.252207 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07a77276-c90a-4b36-bfb6-3da9beccbb2f-catalog-content\") pod \"redhat-marketplace-b5m67\" (UID: \"07a77276-c90a-4b36-bfb6-3da9beccbb2f\") " pod="openshift-marketplace/redhat-marketplace-b5m67" Feb 02 11:08:38 crc kubenswrapper[4838]: I0202 11:08:38.252286 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4779\" (UniqueName: \"kubernetes.io/projected/07a77276-c90a-4b36-bfb6-3da9beccbb2f-kube-api-access-m4779\") pod \"redhat-marketplace-b5m67\" (UID: \"07a77276-c90a-4b36-bfb6-3da9beccbb2f\") " pod="openshift-marketplace/redhat-marketplace-b5m67" Feb 02 11:08:38 crc kubenswrapper[4838]: I0202 11:08:38.252906 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07a77276-c90a-4b36-bfb6-3da9beccbb2f-utilities\") pod \"redhat-marketplace-b5m67\" (UID: \"07a77276-c90a-4b36-bfb6-3da9beccbb2f\") " pod="openshift-marketplace/redhat-marketplace-b5m67" Feb 02 11:08:38 crc kubenswrapper[4838]: I0202 11:08:38.252953 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07a77276-c90a-4b36-bfb6-3da9beccbb2f-catalog-content\") pod \"redhat-marketplace-b5m67\" (UID: \"07a77276-c90a-4b36-bfb6-3da9beccbb2f\") " pod="openshift-marketplace/redhat-marketplace-b5m67" Feb 02 11:08:38 crc kubenswrapper[4838]: I0202 11:08:38.285423 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4779\" (UniqueName: \"kubernetes.io/projected/07a77276-c90a-4b36-bfb6-3da9beccbb2f-kube-api-access-m4779\") pod \"redhat-marketplace-b5m67\" (UID: \"07a77276-c90a-4b36-bfb6-3da9beccbb2f\") " pod="openshift-marketplace/redhat-marketplace-b5m67" Feb 02 11:08:38 crc kubenswrapper[4838]: I0202 11:08:38.302024 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b5m67" Feb 02 11:08:38 crc kubenswrapper[4838]: I0202 11:08:38.353335 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-metrics-certs\") pod \"openstack-operator-controller-manager-5dcd749f76-wzqhl\" (UID: \"83e1d0f5-af2b-4c12-abbd-712e18108a24\") " pod="openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl" Feb 02 11:08:38 crc kubenswrapper[4838]: I0202 11:08:38.353483 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-webhook-certs\") pod \"openstack-operator-controller-manager-5dcd749f76-wzqhl\" (UID: \"83e1d0f5-af2b-4c12-abbd-712e18108a24\") " pod="openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl" Feb 02 11:08:38 crc kubenswrapper[4838]: E0202 11:08:38.353655 4838 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 02 11:08:38 crc kubenswrapper[4838]: E0202 11:08:38.353710 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-webhook-certs podName:83e1d0f5-af2b-4c12-abbd-712e18108a24 nodeName:}" failed. No retries permitted until 2026-02-02 11:08:40.353695964 +0000 UTC m=+914.690796992 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-webhook-certs") pod "openstack-operator-controller-manager-5dcd749f76-wzqhl" (UID: "83e1d0f5-af2b-4c12-abbd-712e18108a24") : secret "webhook-server-cert" not found Feb 02 11:08:38 crc kubenswrapper[4838]: E0202 11:08:38.354333 4838 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 02 11:08:38 crc kubenswrapper[4838]: E0202 11:08:38.354392 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-metrics-certs podName:83e1d0f5-af2b-4c12-abbd-712e18108a24 nodeName:}" failed. No retries permitted until 2026-02-02 11:08:40.354380502 +0000 UTC m=+914.691481530 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-metrics-certs") pod "openstack-operator-controller-manager-5dcd749f76-wzqhl" (UID: "83e1d0f5-af2b-4c12-abbd-712e18108a24") : secret "metrics-server-cert" not found Feb 02 11:08:38 crc kubenswrapper[4838]: I0202 11:08:38.844029 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b5m67"] Feb 02 11:08:38 crc kubenswrapper[4838]: W0202 11:08:38.857007 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod07a77276_c90a_4b36_bfb6_3da9beccbb2f.slice/crio-5eedc88f20c36ee3bcddce6d9835ea06f84a54b7f5557ca0bdaab0229eb58977 WatchSource:0}: Error finding container 5eedc88f20c36ee3bcddce6d9835ea06f84a54b7f5557ca0bdaab0229eb58977: Status 404 returned error can't find the container with id 5eedc88f20c36ee3bcddce6d9835ea06f84a54b7f5557ca0bdaab0229eb58977 Feb 02 11:08:39 crc kubenswrapper[4838]: I0202 11:08:39.152815 4838 generic.go:334] "Generic (PLEG): container finished" podID="07a77276-c90a-4b36-bfb6-3da9beccbb2f" containerID="7de9a63967a263a87ac401cd2982cf61d02aed029f2357e0485183c8b58c06a8" exitCode=0 Feb 02 11:08:39 crc kubenswrapper[4838]: I0202 11:08:39.153004 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b5m67" event={"ID":"07a77276-c90a-4b36-bfb6-3da9beccbb2f","Type":"ContainerDied","Data":"7de9a63967a263a87ac401cd2982cf61d02aed029f2357e0485183c8b58c06a8"} Feb 02 11:08:39 crc kubenswrapper[4838]: I0202 11:08:39.153048 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b5m67" event={"ID":"07a77276-c90a-4b36-bfb6-3da9beccbb2f","Type":"ContainerStarted","Data":"5eedc88f20c36ee3bcddce6d9835ea06f84a54b7f5557ca0bdaab0229eb58977"} Feb 02 11:08:39 crc kubenswrapper[4838]: E0202 11:08:39.162006 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:f9bf288cd0c13912404027a58ea3b90d4092b641e8265adc5c88644ea7fe901a\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4d7p8" podUID="5e7863af-65e8-4d89-a434-fac6c13414cc" Feb 02 11:08:39 crc kubenswrapper[4838]: E0202 11:08:39.162423 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241\\\"\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-5xdv4" podUID="27825541-2816-4017-bba1-0f6f5946bb3c" Feb 02 11:08:39 crc kubenswrapper[4838]: E0202 11:08:39.162557 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:ea7b72b648a5bde2eebd804c2a5c1608d448a4892176c1b8d000c1eef4bb92b4\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-lbjxl" podUID="e40ca74c-361a-4102-b7de-35464bb8821b" Feb 02 11:08:39 crc kubenswrapper[4838]: E0202 11:08:39.162650 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pljzb" podUID="a158eb1e-69b8-48ad-8061-a3e503981572" Feb 02 11:08:39 crc kubenswrapper[4838]: E0202 11:08:39.162739 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:42ad717de1b82267d244b016e5491a5b66a5c3deb6b8c2906a379e1296a2c382\\\"\"" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-8l8mb" podUID="d960011d-30b7-4eb4-9e06-1b8b9aa0a114" Feb 02 11:08:39 crc kubenswrapper[4838]: E0202 11:08:39.162821 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:e6f2f361f1dcbb321407a5884951e16ff96e7b88942b10b548f27ad4de14a0be\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-hp6js" podUID="382292e1-fda7-4ab5-91e7-cf4ade4d6363" Feb 02 11:08:39 crc kubenswrapper[4838]: I0202 11:08:39.801578 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5e0647d6-93ed-40f1-a522-f5ecf769dd14-cert\") pod \"infra-operator-controller-manager-79955696d6-thn2f\" (UID: \"5e0647d6-93ed-40f1-a522-f5ecf769dd14\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-thn2f" Feb 02 11:08:39 crc kubenswrapper[4838]: E0202 11:08:39.801768 4838 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 02 11:08:39 crc kubenswrapper[4838]: E0202 11:08:39.801851 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5e0647d6-93ed-40f1-a522-f5ecf769dd14-cert podName:5e0647d6-93ed-40f1-a522-f5ecf769dd14 nodeName:}" failed. No retries permitted until 2026-02-02 11:08:43.80182884 +0000 UTC m=+918.138929858 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/5e0647d6-93ed-40f1-a522-f5ecf769dd14-cert") pod "infra-operator-controller-manager-79955696d6-thn2f" (UID: "5e0647d6-93ed-40f1-a522-f5ecf769dd14") : secret "infra-operator-webhook-server-cert" not found Feb 02 11:08:40 crc kubenswrapper[4838]: I0202 11:08:40.003822 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0a8916a2-6c71-4678-9a42-23b82b72f891-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj\" (UID: \"0a8916a2-6c71-4678-9a42-23b82b72f891\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj" Feb 02 11:08:40 crc kubenswrapper[4838]: E0202 11:08:40.003969 4838 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 11:08:40 crc kubenswrapper[4838]: E0202 11:08:40.004015 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a8916a2-6c71-4678-9a42-23b82b72f891-cert podName:0a8916a2-6c71-4678-9a42-23b82b72f891 nodeName:}" failed. No retries permitted until 2026-02-02 11:08:44.004000736 +0000 UTC m=+918.341101764 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/0a8916a2-6c71-4678-9a42-23b82b72f891-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj" (UID: "0a8916a2-6c71-4678-9a42-23b82b72f891") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 11:08:40 crc kubenswrapper[4838]: I0202 11:08:40.408872 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-metrics-certs\") pod \"openstack-operator-controller-manager-5dcd749f76-wzqhl\" (UID: \"83e1d0f5-af2b-4c12-abbd-712e18108a24\") " pod="openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl" Feb 02 11:08:40 crc kubenswrapper[4838]: I0202 11:08:40.408974 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-webhook-certs\") pod \"openstack-operator-controller-manager-5dcd749f76-wzqhl\" (UID: \"83e1d0f5-af2b-4c12-abbd-712e18108a24\") " pod="openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl" Feb 02 11:08:40 crc kubenswrapper[4838]: E0202 11:08:40.409117 4838 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 02 11:08:40 crc kubenswrapper[4838]: E0202 11:08:40.409166 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-webhook-certs podName:83e1d0f5-af2b-4c12-abbd-712e18108a24 nodeName:}" failed. No retries permitted until 2026-02-02 11:08:44.409150078 +0000 UTC m=+918.746251106 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-webhook-certs") pod "openstack-operator-controller-manager-5dcd749f76-wzqhl" (UID: "83e1d0f5-af2b-4c12-abbd-712e18108a24") : secret "webhook-server-cert" not found Feb 02 11:08:40 crc kubenswrapper[4838]: E0202 11:08:40.409297 4838 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 02 11:08:40 crc kubenswrapper[4838]: E0202 11:08:40.409337 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-metrics-certs podName:83e1d0f5-af2b-4c12-abbd-712e18108a24 nodeName:}" failed. No retries permitted until 2026-02-02 11:08:44.409328203 +0000 UTC m=+918.746429231 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-metrics-certs") pod "openstack-operator-controller-manager-5dcd749f76-wzqhl" (UID: "83e1d0f5-af2b-4c12-abbd-712e18108a24") : secret "metrics-server-cert" not found Feb 02 11:08:43 crc kubenswrapper[4838]: I0202 11:08:43.862694 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5e0647d6-93ed-40f1-a522-f5ecf769dd14-cert\") pod \"infra-operator-controller-manager-79955696d6-thn2f\" (UID: \"5e0647d6-93ed-40f1-a522-f5ecf769dd14\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-thn2f" Feb 02 11:08:43 crc kubenswrapper[4838]: E0202 11:08:43.862903 4838 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 02 11:08:43 crc kubenswrapper[4838]: E0202 11:08:43.863205 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5e0647d6-93ed-40f1-a522-f5ecf769dd14-cert podName:5e0647d6-93ed-40f1-a522-f5ecf769dd14 nodeName:}" failed. No retries permitted until 2026-02-02 11:08:51.863183662 +0000 UTC m=+926.200284710 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/5e0647d6-93ed-40f1-a522-f5ecf769dd14-cert") pod "infra-operator-controller-manager-79955696d6-thn2f" (UID: "5e0647d6-93ed-40f1-a522-f5ecf769dd14") : secret "infra-operator-webhook-server-cert" not found Feb 02 11:08:44 crc kubenswrapper[4838]: I0202 11:08:44.066030 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0a8916a2-6c71-4678-9a42-23b82b72f891-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj\" (UID: \"0a8916a2-6c71-4678-9a42-23b82b72f891\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj" Feb 02 11:08:44 crc kubenswrapper[4838]: E0202 11:08:44.066184 4838 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 11:08:44 crc kubenswrapper[4838]: E0202 11:08:44.066529 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a8916a2-6c71-4678-9a42-23b82b72f891-cert podName:0a8916a2-6c71-4678-9a42-23b82b72f891 nodeName:}" failed. No retries permitted until 2026-02-02 11:08:52.066503688 +0000 UTC m=+926.403604816 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/0a8916a2-6c71-4678-9a42-23b82b72f891-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj" (UID: "0a8916a2-6c71-4678-9a42-23b82b72f891") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 11:08:44 crc kubenswrapper[4838]: I0202 11:08:44.471121 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-metrics-certs\") pod \"openstack-operator-controller-manager-5dcd749f76-wzqhl\" (UID: \"83e1d0f5-af2b-4c12-abbd-712e18108a24\") " pod="openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl" Feb 02 11:08:44 crc kubenswrapper[4838]: I0202 11:08:44.471280 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-webhook-certs\") pod \"openstack-operator-controller-manager-5dcd749f76-wzqhl\" (UID: \"83e1d0f5-af2b-4c12-abbd-712e18108a24\") " pod="openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl" Feb 02 11:08:44 crc kubenswrapper[4838]: E0202 11:08:44.471354 4838 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 02 11:08:44 crc kubenswrapper[4838]: E0202 11:08:44.471437 4838 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 02 11:08:44 crc kubenswrapper[4838]: E0202 11:08:44.471482 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-metrics-certs podName:83e1d0f5-af2b-4c12-abbd-712e18108a24 nodeName:}" failed. No retries permitted until 2026-02-02 11:08:52.471430734 +0000 UTC m=+926.808531862 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-metrics-certs") pod "openstack-operator-controller-manager-5dcd749f76-wzqhl" (UID: "83e1d0f5-af2b-4c12-abbd-712e18108a24") : secret "metrics-server-cert" not found Feb 02 11:08:44 crc kubenswrapper[4838]: E0202 11:08:44.471522 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-webhook-certs podName:83e1d0f5-af2b-4c12-abbd-712e18108a24 nodeName:}" failed. No retries permitted until 2026-02-02 11:08:52.471503106 +0000 UTC m=+926.808604144 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-webhook-certs") pod "openstack-operator-controller-manager-5dcd749f76-wzqhl" (UID: "83e1d0f5-af2b-4c12-abbd-712e18108a24") : secret "webhook-server-cert" not found Feb 02 11:08:51 crc kubenswrapper[4838]: I0202 11:08:51.888464 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5e0647d6-93ed-40f1-a522-f5ecf769dd14-cert\") pod \"infra-operator-controller-manager-79955696d6-thn2f\" (UID: \"5e0647d6-93ed-40f1-a522-f5ecf769dd14\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-thn2f" Feb 02 11:08:51 crc kubenswrapper[4838]: I0202 11:08:51.896358 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5e0647d6-93ed-40f1-a522-f5ecf769dd14-cert\") pod \"infra-operator-controller-manager-79955696d6-thn2f\" (UID: \"5e0647d6-93ed-40f1-a522-f5ecf769dd14\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-thn2f" Feb 02 11:08:52 crc kubenswrapper[4838]: E0202 11:08:52.056341 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:1f593e8d49d02b6484c89632192ae54771675c54fbd8426e3675b8e20ecfd7c4" Feb 02 11:08:52 crc kubenswrapper[4838]: E0202 11:08:52.056858 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:1f593e8d49d02b6484c89632192ae54771675c54fbd8426e3675b8e20ecfd7c4,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-z9kxf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-8886f4c47-xftmv_openstack-operators(084b46d2-88a9-42e4-83b2-dbccf264aafe): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 11:08:52 crc kubenswrapper[4838]: E0202 11:08:52.058147 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-xftmv" podUID="084b46d2-88a9-42e4-83b2-dbccf264aafe" Feb 02 11:08:52 crc kubenswrapper[4838]: I0202 11:08:52.092365 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0a8916a2-6c71-4678-9a42-23b82b72f891-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj\" (UID: \"0a8916a2-6c71-4678-9a42-23b82b72f891\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj" Feb 02 11:08:52 crc kubenswrapper[4838]: I0202 11:08:52.097511 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0a8916a2-6c71-4678-9a42-23b82b72f891-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj\" (UID: \"0a8916a2-6c71-4678-9a42-23b82b72f891\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj" Feb 02 11:08:52 crc kubenswrapper[4838]: I0202 11:08:52.154875 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-79955696d6-thn2f" Feb 02 11:08:52 crc kubenswrapper[4838]: I0202 11:08:52.212757 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj" Feb 02 11:08:52 crc kubenswrapper[4838]: E0202 11:08:52.234126 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/glance-operator@sha256:1f593e8d49d02b6484c89632192ae54771675c54fbd8426e3675b8e20ecfd7c4\\\"\"" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-xftmv" podUID="084b46d2-88a9-42e4-83b2-dbccf264aafe" Feb 02 11:08:52 crc kubenswrapper[4838]: I0202 11:08:52.499340 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-webhook-certs\") pod \"openstack-operator-controller-manager-5dcd749f76-wzqhl\" (UID: \"83e1d0f5-af2b-4c12-abbd-712e18108a24\") " pod="openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl" Feb 02 11:08:52 crc kubenswrapper[4838]: I0202 11:08:52.499455 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-metrics-certs\") pod \"openstack-operator-controller-manager-5dcd749f76-wzqhl\" (UID: \"83e1d0f5-af2b-4c12-abbd-712e18108a24\") " pod="openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl" Feb 02 11:08:52 crc kubenswrapper[4838]: I0202 11:08:52.509255 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-webhook-certs\") pod \"openstack-operator-controller-manager-5dcd749f76-wzqhl\" (UID: \"83e1d0f5-af2b-4c12-abbd-712e18108a24\") " pod="openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl" Feb 02 11:08:52 crc kubenswrapper[4838]: I0202 11:08:52.511112 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/83e1d0f5-af2b-4c12-abbd-712e18108a24-metrics-certs\") pod \"openstack-operator-controller-manager-5dcd749f76-wzqhl\" (UID: \"83e1d0f5-af2b-4c12-abbd-712e18108a24\") " pod="openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl" Feb 02 11:08:52 crc kubenswrapper[4838]: I0202 11:08:52.553165 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl" Feb 02 11:08:53 crc kubenswrapper[4838]: E0202 11:08:53.400324 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/horizon-operator@sha256:027cd7ab61ef5071d9ad6b729c95a98e51cd254642f01dc019d44cc98a9232f8" Feb 02 11:08:53 crc kubenswrapper[4838]: E0202 11:08:53.400882 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:027cd7ab61ef5071d9ad6b729c95a98e51cd254642f01dc019d44cc98a9232f8,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8drm5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-5fb775575f-2kqdq_openstack-operators(ccd2dd8e-7b67-4b94-9b9a-b76fab87903c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 11:08:53 crc kubenswrapper[4838]: E0202 11:08:53.402442 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-2kqdq" podUID="ccd2dd8e-7b67-4b94-9b9a-b76fab87903c" Feb 02 11:08:54 crc kubenswrapper[4838]: E0202 11:08:54.245696 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/horizon-operator@sha256:027cd7ab61ef5071d9ad6b729c95a98e51cd254642f01dc019d44cc98a9232f8\\\"\"" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-2kqdq" podUID="ccd2dd8e-7b67-4b94-9b9a-b76fab87903c" Feb 02 11:08:54 crc kubenswrapper[4838]: E0202 11:08:54.477108 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/cinder-operator@sha256:6e21a1dda86ba365817102d23a5d4d2d5dcd1c4d8e5f8d74bd24548aa8c63898" Feb 02 11:08:54 crc kubenswrapper[4838]: E0202 11:08:54.477316 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/cinder-operator@sha256:6e21a1dda86ba365817102d23a5d4d2d5dcd1c4d8e5f8d74bd24548aa8c63898,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-w2zx9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-8d874c8fc-hm4jh_openstack-operators(13ab41db-f38e-4980-89f9-361236526dfa): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 11:08:54 crc kubenswrapper[4838]: E0202 11:08:54.478532 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-hm4jh" podUID="13ab41db-f38e-4980-89f9-361236526dfa" Feb 02 11:08:55 crc kubenswrapper[4838]: E0202 11:08:55.151553 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.111:5001/openstack-k8s-operators/ironic-operator:f84b67e990d7965f140e929ce7f077ed6e7ec105" Feb 02 11:08:55 crc kubenswrapper[4838]: E0202 11:08:55.151631 4838 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.111:5001/openstack-k8s-operators/ironic-operator:f84b67e990d7965f140e929ce7f077ed6e7ec105" Feb 02 11:08:55 crc kubenswrapper[4838]: E0202 11:08:55.151754 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.111:5001/openstack-k8s-operators/ironic-operator:f84b67e990d7965f140e929ce7f077ed6e7ec105,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-z9b8h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-7c6b8858cc-lk5ts_openstack-operators(79dd465e-2e36-423e-af5b-f41d715c0297): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 11:08:55 crc kubenswrapper[4838]: E0202 11:08:55.153108 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack-operators/ironic-operator-controller-manager-7c6b8858cc-lk5ts" podUID="79dd465e-2e36-423e-af5b-f41d715c0297" Feb 02 11:08:55 crc kubenswrapper[4838]: E0202 11:08:55.252882 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.111:5001/openstack-k8s-operators/ironic-operator:f84b67e990d7965f140e929ce7f077ed6e7ec105\\\"\"" pod="openstack-operators/ironic-operator-controller-manager-7c6b8858cc-lk5ts" podUID="79dd465e-2e36-423e-af5b-f41d715c0297" Feb 02 11:08:55 crc kubenswrapper[4838]: E0202 11:08:55.261732 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/cinder-operator@sha256:6e21a1dda86ba365817102d23a5d4d2d5dcd1c4d8e5f8d74bd24548aa8c63898\\\"\"" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-hm4jh" podUID="13ab41db-f38e-4980-89f9-361236526dfa" Feb 02 11:08:55 crc kubenswrapper[4838]: E0202 11:08:55.746023 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/watcher-operator@sha256:7869203f6f97de780368d507636031090fed3b658d2f7771acbd4481bdfc870b" Feb 02 11:08:55 crc kubenswrapper[4838]: E0202 11:08:55.746186 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:7869203f6f97de780368d507636031090fed3b658d2f7771acbd4481bdfc870b,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lrf7q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-564965969-h6lq5_openstack-operators(149430e7-7b6d-44d0-a474-944271e7bb5e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 11:08:55 crc kubenswrapper[4838]: E0202 11:08:55.748194 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/watcher-operator-controller-manager-564965969-h6lq5" podUID="149430e7-7b6d-44d0-a474-944271e7bb5e" Feb 02 11:08:56 crc kubenswrapper[4838]: E0202 11:08:56.263346 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7869203f6f97de780368d507636031090fed3b658d2f7771acbd4481bdfc870b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-564965969-h6lq5" podUID="149430e7-7b6d-44d0-a474-944271e7bb5e" Feb 02 11:08:58 crc kubenswrapper[4838]: I0202 11:08:58.137171 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hq8pt"] Feb 02 11:08:58 crc kubenswrapper[4838]: I0202 11:08:58.139816 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hq8pt" Feb 02 11:08:58 crc kubenswrapper[4838]: I0202 11:08:58.152211 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hq8pt"] Feb 02 11:08:58 crc kubenswrapper[4838]: I0202 11:08:58.281418 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e-catalog-content\") pod \"community-operators-hq8pt\" (UID: \"1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e\") " pod="openshift-marketplace/community-operators-hq8pt" Feb 02 11:08:58 crc kubenswrapper[4838]: I0202 11:08:58.281508 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6wkz\" (UniqueName: \"kubernetes.io/projected/1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e-kube-api-access-h6wkz\") pod \"community-operators-hq8pt\" (UID: \"1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e\") " pod="openshift-marketplace/community-operators-hq8pt" Feb 02 11:08:58 crc kubenswrapper[4838]: I0202 11:08:58.281608 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e-utilities\") pod \"community-operators-hq8pt\" (UID: \"1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e\") " pod="openshift-marketplace/community-operators-hq8pt" Feb 02 11:08:58 crc kubenswrapper[4838]: I0202 11:08:58.382829 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6wkz\" (UniqueName: \"kubernetes.io/projected/1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e-kube-api-access-h6wkz\") pod \"community-operators-hq8pt\" (UID: \"1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e\") " pod="openshift-marketplace/community-operators-hq8pt" Feb 02 11:08:58 crc kubenswrapper[4838]: I0202 11:08:58.382931 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e-utilities\") pod \"community-operators-hq8pt\" (UID: \"1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e\") " pod="openshift-marketplace/community-operators-hq8pt" Feb 02 11:08:58 crc kubenswrapper[4838]: I0202 11:08:58.382989 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e-catalog-content\") pod \"community-operators-hq8pt\" (UID: \"1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e\") " pod="openshift-marketplace/community-operators-hq8pt" Feb 02 11:08:58 crc kubenswrapper[4838]: I0202 11:08:58.383476 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e-utilities\") pod \"community-operators-hq8pt\" (UID: \"1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e\") " pod="openshift-marketplace/community-operators-hq8pt" Feb 02 11:08:58 crc kubenswrapper[4838]: I0202 11:08:58.383514 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e-catalog-content\") pod \"community-operators-hq8pt\" (UID: \"1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e\") " pod="openshift-marketplace/community-operators-hq8pt" Feb 02 11:08:58 crc kubenswrapper[4838]: I0202 11:08:58.412805 4838 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-h6wkz\" (UniqueName: \"kubernetes.io/projected/1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e-kube-api-access-h6wkz\") pod \"community-operators-hq8pt\" (UID: \"1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e\") " pod="openshift-marketplace/community-operators-hq8pt" Feb 02 11:08:58 crc kubenswrapper[4838]: I0202 11:08:58.476949 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hq8pt" Feb 02 11:09:03 crc kubenswrapper[4838]: E0202 11:09:03.229828 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:5340b88039fac393da49ef4e181b2720c809c27a6bb30531a07a49342a1da45e" Feb 02 11:09:03 crc kubenswrapper[4838]: E0202 11:09:03.230655 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:5340b88039fac393da49ef4e181b2720c809c27a6bb30531a07a49342a1da45e,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kww6h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-55bff696bd-pms7g_openstack-operators(a17b67e7-df64-4f12-8e78-c52068d2b1df): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 11:09:03 crc kubenswrapper[4838]: E0202 11:09:03.232454 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-pms7g" podUID="a17b67e7-df64-4f12-8e78-c52068d2b1df" Feb 02 11:09:03 crc kubenswrapper[4838]: E0202 11:09:03.314836 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:5340b88039fac393da49ef4e181b2720c809c27a6bb30531a07a49342a1da45e\\\"\"" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-pms7g" podUID="a17b67e7-df64-4f12-8e78-c52068d2b1df" Feb 02 11:09:03 crc kubenswrapper[4838]: I0202 11:09:03.577964 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-49dkb"] Feb 02 11:09:03 crc kubenswrapper[4838]: I0202 11:09:03.579602 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-49dkb" Feb 02 11:09:03 crc kubenswrapper[4838]: I0202 11:09:03.583989 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-49dkb"] Feb 02 11:09:03 crc kubenswrapper[4838]: E0202 11:09:03.675176 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:319c969e88f109b26487a9f5a67203682803d7386424703ab7ca0340be99ae17" Feb 02 11:09:03 crc kubenswrapper[4838]: E0202 11:09:03.675766 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:319c969e88f109b26487a9f5a67203682803d7386424703ab7ca0340be99ae17,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kg4hm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-84f48565d4-tn9zr_openstack-operators(4b7d42b0-25f5-40d4-8deb-34841b6c8c92): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 11:09:03 crc kubenswrapper[4838]: E0202 11:09:03.676968 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-tn9zr" podUID="4b7d42b0-25f5-40d4-8deb-34841b6c8c92" Feb 02 11:09:03 crc kubenswrapper[4838]: I0202 11:09:03.760781 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62aeb885-450a-4bbe-9f57-1340be79abfe-catalog-content\") pod \"certified-operators-49dkb\" (UID: \"62aeb885-450a-4bbe-9f57-1340be79abfe\") " pod="openshift-marketplace/certified-operators-49dkb" Feb 02 11:09:03 crc kubenswrapper[4838]: I0202 11:09:03.761129 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8r7l\" (UniqueName: \"kubernetes.io/projected/62aeb885-450a-4bbe-9f57-1340be79abfe-kube-api-access-w8r7l\") pod \"certified-operators-49dkb\" (UID: \"62aeb885-450a-4bbe-9f57-1340be79abfe\") " pod="openshift-marketplace/certified-operators-49dkb" Feb 02 11:09:03 crc kubenswrapper[4838]: I0202 11:09:03.761255 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62aeb885-450a-4bbe-9f57-1340be79abfe-utilities\") pod \"certified-operators-49dkb\" (UID: \"62aeb885-450a-4bbe-9f57-1340be79abfe\") " pod="openshift-marketplace/certified-operators-49dkb" Feb 02 11:09:03 crc kubenswrapper[4838]: I0202 11:09:03.862499 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62aeb885-450a-4bbe-9f57-1340be79abfe-catalog-content\") pod \"certified-operators-49dkb\" (UID: \"62aeb885-450a-4bbe-9f57-1340be79abfe\") " pod="openshift-marketplace/certified-operators-49dkb" Feb 02 11:09:03 crc kubenswrapper[4838]: I0202 11:09:03.862576 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8r7l\" (UniqueName: \"kubernetes.io/projected/62aeb885-450a-4bbe-9f57-1340be79abfe-kube-api-access-w8r7l\") pod \"certified-operators-49dkb\" (UID: \"62aeb885-450a-4bbe-9f57-1340be79abfe\") " pod="openshift-marketplace/certified-operators-49dkb" Feb 02 11:09:03 crc kubenswrapper[4838]: I0202 11:09:03.862644 4838 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62aeb885-450a-4bbe-9f57-1340be79abfe-utilities\") pod \"certified-operators-49dkb\" (UID: \"62aeb885-450a-4bbe-9f57-1340be79abfe\") " pod="openshift-marketplace/certified-operators-49dkb" Feb 02 11:09:03 crc kubenswrapper[4838]: I0202 11:09:03.863329 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62aeb885-450a-4bbe-9f57-1340be79abfe-utilities\") pod \"certified-operators-49dkb\" (UID: \"62aeb885-450a-4bbe-9f57-1340be79abfe\") " pod="openshift-marketplace/certified-operators-49dkb" Feb 02 11:09:03 crc kubenswrapper[4838]: I0202 11:09:03.863594 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62aeb885-450a-4bbe-9f57-1340be79abfe-catalog-content\") pod \"certified-operators-49dkb\" (UID: \"62aeb885-450a-4bbe-9f57-1340be79abfe\") " pod="openshift-marketplace/certified-operators-49dkb" Feb 02 11:09:03 crc kubenswrapper[4838]: I0202 11:09:03.888752 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8r7l\" (UniqueName: \"kubernetes.io/projected/62aeb885-450a-4bbe-9f57-1340be79abfe-kube-api-access-w8r7l\") pod \"certified-operators-49dkb\" (UID: \"62aeb885-450a-4bbe-9f57-1340be79abfe\") " pod="openshift-marketplace/certified-operators-49dkb" Feb 02 11:09:03 crc kubenswrapper[4838]: I0202 11:09:03.914052 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-49dkb" Feb 02 11:09:04 crc kubenswrapper[4838]: E0202 11:09:04.319744 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:319c969e88f109b26487a9f5a67203682803d7386424703ab7ca0340be99ae17\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-tn9zr" podUID="4b7d42b0-25f5-40d4-8deb-34841b6c8c92" Feb 02 11:09:15 crc kubenswrapper[4838]: I0202 11:09:15.430364 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 11:09:15 crc kubenswrapper[4838]: I0202 11:09:15.430824 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 11:09:26 crc kubenswrapper[4838]: E0202 11:09:26.597342 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/swift-operator@sha256:42ad717de1b82267d244b016e5491a5b66a5c3deb6b8c2906a379e1296a2c382" Feb 02 11:09:26 crc kubenswrapper[4838]: E0202 11:09:26.598092 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:42ad717de1b82267d244b016e5491a5b66a5c3deb6b8c2906a379e1296a2c382,Command:[/manager],Args:[--leader-elect 
--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hlwx5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-68fc8c869-8l8mb_openstack-operators(d960011d-30b7-4eb4-9e06-1b8b9aa0a114): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 11:09:26 crc kubenswrapper[4838]: E0202 11:09:26.599852 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-8l8mb" podUID="d960011d-30b7-4eb4-9e06-1b8b9aa0a114" Feb 02 11:09:26 crc kubenswrapper[4838]: E0202 11:09:26.838511 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:e6f2f361f1dcbb321407a5884951e16ff96e7b88942b10b548f27ad4de14a0be" Feb 02 11:09:26 crc kubenswrapper[4838]: E0202 11:09:26.838941 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:e6f2f361f1dcbb321407a5884951e16ff96e7b88942b10b548f27ad4de14a0be,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rr6js,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-6687f8d877-hp6js_openstack-operators(382292e1-fda7-4ab5-91e7-cf4ade4d6363): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 11:09:26 crc kubenswrapper[4838]: E0202 11:09:26.840333 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-hp6js" podUID="382292e1-fda7-4ab5-91e7-cf4ade4d6363" Feb 02 11:09:26 crc kubenswrapper[4838]: E0202 11:09:26.901732 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/telemetry-operator@sha256:f9bf288cd0c13912404027a58ea3b90d4092b641e8265adc5c88644ea7fe901a" Feb 02 11:09:26 crc kubenswrapper[4838]: E0202 11:09:26.902116 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:f9bf288cd0c13912404027a58ea3b90d4092b641e8265adc5c88644ea7fe901a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4b496,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-64b5b76f97-4d7p8_openstack-operators(5e7863af-65e8-4d89-a434-fac6c13414cc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 11:09:26 crc kubenswrapper[4838]: E0202 11:09:26.903440 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4d7p8" podUID="5e7863af-65e8-4d89-a434-fac6c13414cc" Feb 02 11:09:27 crc kubenswrapper[4838]: I0202 11:09:27.387204 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj"] Feb 02 11:09:28 crc kubenswrapper[4838]: E0202 11:09:28.148764 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2" Feb 02 11:09:28 crc kubenswrapper[4838]: E0202 11:09:28.149027 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gkbss,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-pljzb_openstack-operators(a158eb1e-69b8-48ad-8061-a3e503981572): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 11:09:28 crc kubenswrapper[4838]: E0202 11:09:28.150251 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pljzb" podUID="a158eb1e-69b8-48ad-8061-a3e503981572" Feb 02 11:09:28 crc kubenswrapper[4838]: I0202 11:09:28.965425 4838 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 11:09:29 crc kubenswrapper[4838]: I0202 11:09:29.224216 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl"] Feb 02 11:09:29 crc kubenswrapper[4838]: I0202 11:09:29.492296 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-thn2f"] Feb 02 11:09:29 crc kubenswrapper[4838]: I0202 11:09:29.616440 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-49dkb"] Feb 02 11:09:29 crc kubenswrapper[4838]: I0202 11:09:29.661655 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hq8pt"] Feb 02 11:09:29 crc kubenswrapper[4838]: I0202 11:09:29.699332 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj" event={"ID":"0a8916a2-6c71-4678-9a42-23b82b72f891","Type":"ContainerStarted","Data":"e5bdd6efa78d30d69efad33b8ae50a5b0323ada3b49ca4b966eb67bd9ac5e92b"} Feb 02 11:09:29 crc kubenswrapper[4838]: I0202 11:09:29.701360 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-rs64q" event={"ID":"e358aab4-cbb0-4522-8740-6646b7fdcabd","Type":"ContainerStarted","Data":"02525162ecf4594ba089fb6d8d4cb043b5196f87e6ed6464d568dfe65e117b14"} Feb 02 11:09:29 crc kubenswrapper[4838]: I0202 11:09:29.703946 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-rs64q" Feb 02 11:09:29 crc kubenswrapper[4838]: I0202 11:09:29.732468 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-rs64q" podStartSLOduration=30.565604289 podStartE2EDuration="54.732452881s" podCreationTimestamp="2026-02-02 11:08:35 +0000 UTC" firstStartedPulling="2026-02-02 11:08:36.887164315 +0000 UTC m=+911.224265343" lastFinishedPulling="2026-02-02 11:09:01.054012867 +0000 UTC m=+935.391113935" observedRunningTime="2026-02-02 11:09:29.728762914 +0000 UTC m=+964.065863932" watchObservedRunningTime="2026-02-02 11:09:29.732452881 +0000 UTC m=+964.069553909" Feb 02 11:09:29 crc kubenswrapper[4838]: I0202 11:09:29.734238 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-q6xd2" event={"ID":"8291636d-bcda-4171-825a-f0f3c73b1320","Type":"ContainerStarted","Data":"99f39dfaa224a22f2e261c592c3ebe7c15734486b196774b9efca93274b6bad3"} Feb 02 11:09:29 crc kubenswrapper[4838]: I0202 11:09:29.734379 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-q6xd2" Feb 02 11:09:29 crc kubenswrapper[4838]: I0202 11:09:29.740720 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-49dkb" event={"ID":"62aeb885-450a-4bbe-9f57-1340be79abfe","Type":"ContainerStarted","Data":"a08f274556cda3fade17f747fce643652abb848da9963c28cf9822022ddf8872"} Feb 02 11:09:29 crc kubenswrapper[4838]: I0202 11:09:29.747463 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kbj4l" event={"ID":"7ebd9e27-5249-4c31-86cd-200ec9c3b852","Type":"ContainerStarted","Data":"2b9e1799420ec2d471ea0b6e774c3d066fc4bf0ed76600311ac4806b86edd764"} Feb 02 11:09:29 crc kubenswrapper[4838]: I0202 11:09:29.747658 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kbj4l" Feb 02 11:09:29 crc kubenswrapper[4838]: I0202 11:09:29.757783 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-q6xd2" podStartSLOduration=29.813828694 podStartE2EDuration="54.757762767s" podCreationTimestamp="2026-02-02 11:08:35 +0000 UTC" firstStartedPulling="2026-02-02 11:08:37.329052183 +0000 UTC m=+911.666153211" lastFinishedPulling="2026-02-02 11:09:02.272986216 +0000 UTC m=+936.610087284" observedRunningTime="2026-02-02 11:09:29.751973694 +0000 UTC m=+964.089074722" watchObservedRunningTime="2026-02-02 11:09:29.757762767 +0000 UTC 
m=+964.094863795" Feb 02 11:09:29 crc kubenswrapper[4838]: I0202 11:09:29.758040 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl" event={"ID":"83e1d0f5-af2b-4c12-abbd-712e18108a24","Type":"ContainerStarted","Data":"6ff6d35b68defcd0ebf04d7ce0975df790951e571c0f4c94d99796608e143d1b"} Feb 02 11:09:29 crc kubenswrapper[4838]: I0202 11:09:29.771747 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-79955696d6-thn2f" event={"ID":"5e0647d6-93ed-40f1-a522-f5ecf769dd14","Type":"ContainerStarted","Data":"b562038c2f7d471b5773d9e5ad0632e7b8062b928d1e61372b83aa87d865b936"} Feb 02 11:09:29 crc kubenswrapper[4838]: I0202 11:09:29.775188 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kbj4l" podStartSLOduration=32.876354785 podStartE2EDuration="54.775170524s" podCreationTimestamp="2026-02-02 11:08:35 +0000 UTC" firstStartedPulling="2026-02-02 11:08:37.297321249 +0000 UTC m=+911.634422277" lastFinishedPulling="2026-02-02 11:08:59.196136968 +0000 UTC m=+933.533238016" observedRunningTime="2026-02-02 11:09:29.771549839 +0000 UTC m=+964.108650877" watchObservedRunningTime="2026-02-02 11:09:29.775170524 +0000 UTC m=+964.112271552" Feb 02 11:09:29 crc kubenswrapper[4838]: I0202 11:09:29.789263 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-qrf72" event={"ID":"9cfe65eb-c657-4f96-b48f-1c9831fd75ba","Type":"ContainerStarted","Data":"fe2b2a584cf225ec618d37099b6ec613327b4cc326a5bc1052fd11a25928b66b"} Feb 02 11:09:29 crc kubenswrapper[4838]: I0202 11:09:29.790193 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-qrf72" Feb 02 11:09:29 crc kubenswrapper[4838]: I0202 11:09:29.801087 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-db2x5" event={"ID":"cb4f687b-4b19-447b-beb4-1646c2a40800","Type":"ContainerStarted","Data":"389cbd0c787298f4d3ef98e16527b2a931213e0fdcb0695ce0bcb605be27cfe5"} Feb 02 11:09:29 crc kubenswrapper[4838]: I0202 11:09:29.801306 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-db2x5" Feb 02 11:09:29 crc kubenswrapper[4838]: I0202 11:09:29.838988 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-qrf72" podStartSLOduration=29.642303245 podStartE2EDuration="54.838970112s" podCreationTimestamp="2026-02-02 11:08:35 +0000 UTC" firstStartedPulling="2026-02-02 11:08:37.076267768 +0000 UTC m=+911.413368796" lastFinishedPulling="2026-02-02 11:09:02.272934595 +0000 UTC m=+936.610035663" observedRunningTime="2026-02-02 11:09:29.838548891 +0000 UTC m=+964.175649939" watchObservedRunningTime="2026-02-02 11:09:29.838970112 +0000 UTC m=+964.176071160" Feb 02 11:09:29 crc kubenswrapper[4838]: I0202 11:09:29.865207 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-db2x5" podStartSLOduration=32.371860311 podStartE2EDuration="54.865191631s" podCreationTimestamp="2026-02-02 11:08:35 +0000 UTC" firstStartedPulling="2026-02-02 11:08:36.703007453 +0000 UTC 
m=+911.040108471" lastFinishedPulling="2026-02-02 11:08:59.196338723 +0000 UTC m=+933.533439791" observedRunningTime="2026-02-02 11:09:29.85829111 +0000 UTC m=+964.195392138" watchObservedRunningTime="2026-02-02 11:09:29.865191631 +0000 UTC m=+964.202292669"
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.813545 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-2kqdq" event={"ID":"ccd2dd8e-7b67-4b94-9b9a-b76fab87903c","Type":"ContainerStarted","Data":"d5e044449868f2af4fc636c88f4531de3ed6ef1f6c438f8874de3166fc91c5e4"}
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.814339 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-2kqdq"
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.817044 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7c6b8858cc-lk5ts" event={"ID":"79dd465e-2e36-423e-af5b-f41d715c0297","Type":"ContainerStarted","Data":"be220c3481e1e58786188c7f9c14495ff58c6fa644ab9eb708ac6651df1e6abb"}
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.817219 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-7c6b8858cc-lk5ts"
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.820052 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-xftmv" event={"ID":"084b46d2-88a9-42e4-83b2-dbccf264aafe","Type":"ContainerStarted","Data":"36e35c386d5550d525ee8afebef642ce819b2be79236d3d4a49234a0d3d8b3f2"}
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.820333 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-xftmv"
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.827937 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-5s687" event={"ID":"75858b44-9a09-43f3-8de5-8ae999ae2657","Type":"ContainerStarted","Data":"79b34f8ce337a25aca118409c73afcd5694b5b9f735dd8a6716068ce1b23bbbe"}
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.828027 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-5s687"
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.833000 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl" event={"ID":"83e1d0f5-af2b-4c12-abbd-712e18108a24","Type":"ContainerStarted","Data":"e5605f42fcf763fda49080a93740d96361b40623957abcced6c021e14ad016c7"}
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.833175 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl"
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.837153 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-2kqdq" podStartSLOduration=3.853916377 podStartE2EDuration="55.837142746s" podCreationTimestamp="2026-02-02 11:08:35 +0000 UTC" firstStartedPulling="2026-02-02 11:08:37.078186338 +0000 UTC m=+911.415287366" lastFinishedPulling="2026-02-02 11:09:29.061412717 +0000 UTC m=+963.398513735" observedRunningTime="2026-02-02 11:09:30.835592205 +0000 UTC m=+965.172693243" watchObservedRunningTime="2026-02-02 11:09:30.837142746 +0000 UTC m=+965.174243774"
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.853370 4838 generic.go:334] "Generic (PLEG): container finished" podID="1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e" containerID="714e19b74e4e7438b58836254eb35c77d4dfdb9b66f31d71bf606b13148368fb" exitCode=0
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.853636 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hq8pt" event={"ID":"1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e","Type":"ContainerDied","Data":"714e19b74e4e7438b58836254eb35c77d4dfdb9b66f31d71bf606b13148368fb"}
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.853772 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hq8pt" event={"ID":"1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e","Type":"ContainerStarted","Data":"fceaf9c359c0fc4a71ae1fe2f289c67380b4c6ad286f3c6957e63f5a943e7cc1"}
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.859460 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-5s687" podStartSLOduration=30.758323956 podStartE2EDuration="55.859440752s" podCreationTimestamp="2026-02-02 11:08:35 +0000 UTC" firstStartedPulling="2026-02-02 11:08:37.1718477 +0000 UTC m=+911.508948728" lastFinishedPulling="2026-02-02 11:09:02.272964466 +0000 UTC m=+936.610065524" observedRunningTime="2026-02-02 11:09:30.857874171 +0000 UTC m=+965.194975209" watchObservedRunningTime="2026-02-02 11:09:30.859440752 +0000 UTC m=+965.196541800"
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.869162 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-tn9zr" event={"ID":"4b7d42b0-25f5-40d4-8deb-34841b6c8c92","Type":"ContainerStarted","Data":"538177dfd54ad7dadac88b150d122a95670a9c8fdb5b494a3b8f1c32b1cb1e1c"}
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.869945 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-tn9zr"
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.883201 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-5xdv4" event={"ID":"27825541-2816-4017-bba1-0f6f5946bb3c","Type":"ContainerStarted","Data":"ad4a8571cd8dad728e08ab24eb7cc8e62481d084d017ac966d08ec136279ce19"}
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.884103 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-5xdv4"
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.896602 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-xftmv" podStartSLOduration=3.695801319 podStartE2EDuration="55.896584329s" podCreationTimestamp="2026-02-02 11:08:35 +0000 UTC" firstStartedPulling="2026-02-02 11:08:36.865440764 +0000 UTC m=+911.202541792" lastFinishedPulling="2026-02-02 11:09:29.066223774 +0000 UTC m=+963.403324802" observedRunningTime="2026-02-02 11:09:30.886447792 +0000 UTC m=+965.223548820" watchObservedRunningTime="2026-02-02 11:09:30.896584329 +0000 UTC m=+965.233685357"
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.903027 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-pms7g" event={"ID":"a17b67e7-df64-4f12-8e78-c52068d2b1df","Type":"ContainerStarted","Data":"f1e1613f4bf0d8ca0c88f3506c473280bf04e2f0413c2ec85ca7d454a7e15389"}
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.903808 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-pms7g"
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.921027 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-7c6b8858cc-lk5ts" podStartSLOduration=3.965358256 podStartE2EDuration="55.921005771s" podCreationTimestamp="2026-02-02 11:08:35 +0000 UTC" firstStartedPulling="2026-02-02 11:08:37.120446658 +0000 UTC m=+911.457547686" lastFinishedPulling="2026-02-02 11:09:29.076094183 +0000 UTC m=+963.413195201" observedRunningTime="2026-02-02 11:09:30.918006562 +0000 UTC m=+965.255107610" watchObservedRunningTime="2026-02-02 11:09:30.921005771 +0000 UTC m=+965.258106799"
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.944822 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-lbjxl" event={"ID":"e40ca74c-361a-4102-b7de-35464bb8821b","Type":"ContainerStarted","Data":"c0f4546e577197da43dd104a25089130babce846e2a2549054f5ec447c8445d0"}
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.945454 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-lbjxl"
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.953908 4838 generic.go:334] "Generic (PLEG): container finished" podID="62aeb885-450a-4bbe-9f57-1340be79abfe" containerID="6c54674655aa286cf632951e1b8c180c226d0715877d4340d484db4b4dfe7ec1" exitCode=0
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.953974 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-49dkb" event={"ID":"62aeb885-450a-4bbe-9f57-1340be79abfe","Type":"ContainerDied","Data":"6c54674655aa286cf632951e1b8c180c226d0715877d4340d484db4b4dfe7ec1"}
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.956210 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-52b6h" event={"ID":"53860709-50fb-44d9-910b-d4142608d8d8","Type":"ContainerStarted","Data":"2fce5a17fd2f8891a359f98474829bddcb35bf9f5d1e13c48515723ea9b9215e"}
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.956671 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-52b6h"
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.960237 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-hm4jh" event={"ID":"13ab41db-f38e-4980-89f9-361236526dfa","Type":"ContainerStarted","Data":"689126f37eac32450a31e0aa2ade2199f447de19f5352babd7aa96fa7a7c5001"}
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.960863 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-hm4jh"
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.963903 4838 generic.go:334] "Generic (PLEG): container finished" podID="07a77276-c90a-4b36-bfb6-3da9beccbb2f" containerID="5fb3480d1821dbdd842edfe43e8224bea319e2f1178a65a07442f093a8f99bc7" exitCode=0
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.963947 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b5m67" event={"ID":"07a77276-c90a-4b36-bfb6-3da9beccbb2f","Type":"ContainerDied","Data":"5fb3480d1821dbdd842edfe43e8224bea319e2f1178a65a07442f093a8f99bc7"}
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.972965 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-564965969-h6lq5" event={"ID":"149430e7-7b6d-44d0-a474-944271e7bb5e","Type":"ContainerStarted","Data":"42e56e86ec55facc037e201ea006f9761f58c827f6186abf342e3b7f3cb674df"}
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.973407 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-564965969-h6lq5"
Feb 02 11:09:30 crc kubenswrapper[4838]: I0202 11:09:30.989960 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl" podStartSLOduration=54.989932623 podStartE2EDuration="54.989932623s" podCreationTimestamp="2026-02-02 11:08:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:09:30.976910321 +0000 UTC m=+965.314011359" watchObservedRunningTime="2026-02-02 11:09:30.989932623 +0000 UTC m=+965.327033661"
Feb 02 11:09:31 crc kubenswrapper[4838]: I0202 11:09:31.058344 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-tn9zr" podStartSLOduration=4.273436865 podStartE2EDuration="56.058326681s" podCreationTimestamp="2026-02-02 11:08:35 +0000 UTC" firstStartedPulling="2026-02-02 11:08:37.296149598 +0000 UTC m=+911.633250626" lastFinishedPulling="2026-02-02 11:09:29.081039414 +0000 UTC m=+963.418140442" observedRunningTime="2026-02-02 11:09:31.026918106 +0000 UTC m=+965.364019154" watchObservedRunningTime="2026-02-02 11:09:31.058326681 +0000 UTC m=+965.395427699"
Feb 02 11:09:31 crc kubenswrapper[4838]: I0202 11:09:31.090625 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-lbjxl" podStartSLOduration=4.490036771 podStartE2EDuration="56.09059586s" podCreationTimestamp="2026-02-02 11:08:35 +0000 UTC" firstStartedPulling="2026-02-02 11:08:37.431802395 +0000 UTC m=+911.768903423" lastFinishedPulling="2026-02-02 11:09:29.032361484 +0000 UTC m=+963.369462512" observedRunningTime="2026-02-02 11:09:31.088779502 +0000 UTC m=+965.425880560" watchObservedRunningTime="2026-02-02 11:09:31.09059586 +0000 UTC m=+965.427696878"
Feb 02 11:09:31 crc kubenswrapper[4838]: I0202 11:09:31.166771 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-5xdv4" podStartSLOduration=3.716431493 podStartE2EDuration="55.166745652s" podCreationTimestamp="2026-02-02 11:08:36 +0000 UTC" firstStartedPulling="2026-02-02 11:08:37.581597883 +0000 UTC m=+911.918698911" lastFinishedPulling="2026-02-02 11:09:29.031912042 +0000 UTC m=+963.369013070" observedRunningTime="2026-02-02 11:09:31.119931111 +0000 UTC m=+965.457032159" watchObservedRunningTime="2026-02-02 11:09:31.166745652 +0000 UTC m=+965.503846680"
Feb 02 11:09:31 crc kubenswrapper[4838]: I0202 11:09:31.167876 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-pms7g" podStartSLOduration=4.400577689 podStartE2EDuration="56.167869412s" podCreationTimestamp="2026-02-02 11:08:35 +0000 UTC" firstStartedPulling="2026-02-02 11:08:37.31371716 +0000 UTC m=+911.650818188" lastFinishedPulling="2026-02-02 11:09:29.081008883 +0000 UTC m=+963.418109911" observedRunningTime="2026-02-02 11:09:31.156997106 +0000 UTC m=+965.494098134" watchObservedRunningTime="2026-02-02 11:09:31.167869412 +0000 UTC m=+965.504970440"
Feb 02 11:09:31 crc kubenswrapper[4838]: I0202 11:09:31.193850 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-52b6h" podStartSLOduration=31.031808317 podStartE2EDuration="56.193836014s" podCreationTimestamp="2026-02-02 11:08:35 +0000 UTC" firstStartedPulling="2026-02-02 11:08:37.110848606 +0000 UTC m=+911.447949634" lastFinishedPulling="2026-02-02 11:09:02.272876283 +0000 UTC m=+936.609977331" observedRunningTime="2026-02-02 11:09:31.185358561 +0000 UTC m=+965.522459619" watchObservedRunningTime="2026-02-02 11:09:31.193836014 +0000 UTC m=+965.530937042"
Feb 02 11:09:31 crc kubenswrapper[4838]: I0202 11:09:31.271364 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-hm4jh" podStartSLOduration=4.074749252 podStartE2EDuration="56.271343322s" podCreationTimestamp="2026-02-02 11:08:35 +0000 UTC" firstStartedPulling="2026-02-02 11:08:36.884413563 +0000 UTC m=+911.221514591" lastFinishedPulling="2026-02-02 11:09:29.081007633 +0000 UTC m=+963.418108661" observedRunningTime="2026-02-02 11:09:31.268278582 +0000 UTC m=+965.605379620" watchObservedRunningTime="2026-02-02 11:09:31.271343322 +0000 UTC m=+965.608444360"
Feb 02 11:09:31 crc kubenswrapper[4838]: I0202 11:09:31.314059 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-564965969-h6lq5" podStartSLOduration=3.811160154 podStartE2EDuration="55.314036725s" podCreationTimestamp="2026-02-02 11:08:36 +0000 UTC" firstStartedPulling="2026-02-02 11:08:37.578130172 +0000 UTC m=+911.915231200" lastFinishedPulling="2026-02-02 11:09:29.081006743 +0000 UTC m=+963.418107771" observedRunningTime="2026-02-02 11:09:31.309443594 +0000 UTC m=+965.646544632" watchObservedRunningTime="2026-02-02 11:09:31.314036725 +0000 UTC m=+965.651137753"
Feb 02 11:09:36 crc kubenswrapper[4838]: I0202 11:09:36.017938 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-db2x5"
Feb 02 11:09:36 crc kubenswrapper[4838]: I0202 11:09:36.030045 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-hm4jh"
Feb 02 11:09:36 crc kubenswrapper[4838]: I0202 11:09:36.058586 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-rs64q"
Feb 02 11:09:36 crc kubenswrapper[4838]: I0202 11:09:36.086812 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-xftmv"
Feb 02 11:09:36 crc kubenswrapper[4838]: I0202 11:09:36.120022 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-qrf72"
Feb 02 11:09:36 crc kubenswrapper[4838]: I0202 11:09:36.160608 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-2kqdq"
Feb 02 11:09:36 crc kubenswrapper[4838]: I0202 11:09:36.328331 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-7c6b8858cc-lk5ts"
Feb 02 11:09:36 crc kubenswrapper[4838]: I0202 11:09:36.371588 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-tn9zr"
Feb 02 11:09:36 crc kubenswrapper[4838]: I0202 11:09:36.404023 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-5s687"
Feb 02 11:09:36 crc kubenswrapper[4838]: I0202 11:09:36.416232 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-q6xd2"
Feb 02 11:09:36 crc kubenswrapper[4838]: I0202 11:09:36.435041 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-52b6h"
Feb 02 11:09:36 crc kubenswrapper[4838]: I0202 11:09:36.520481 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-pms7g"
Feb 02 11:09:36 crc kubenswrapper[4838]: I0202 11:09:36.574392 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-lbjxl"
Feb 02 11:09:36 crc kubenswrapper[4838]: I0202 11:09:36.638447 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kbj4l"
Feb 02 11:09:36 crc kubenswrapper[4838]: I0202 11:09:36.835995 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-5xdv4"
Feb 02 11:09:36 crc kubenswrapper[4838]: I0202 11:09:36.907878 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-564965969-h6lq5"
Feb 02 11:09:42 crc kubenswrapper[4838]: I0202 11:09:42.564089 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-5dcd749f76-wzqhl"
Feb 02 11:09:43 crc kubenswrapper[4838]: E0202 11:09:43.081277 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pljzb" podUID="a158eb1e-69b8-48ad-8061-a3e503981572"
Feb 02 11:09:43 crc kubenswrapper[4838]: E0202 11:09:43.081398 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:e6f2f361f1dcbb321407a5884951e16ff96e7b88942b10b548f27ad4de14a0be\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-hp6js" podUID="382292e1-fda7-4ab5-91e7-cf4ade4d6363"
Feb 02 11:09:43 crc kubenswrapper[4838]: E0202 11:09:43.081440 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:42ad717de1b82267d244b016e5491a5b66a5c3deb6b8c2906a379e1296a2c382\\\"\"" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-8l8mb" podUID="d960011d-30b7-4eb4-9e06-1b8b9aa0a114"
Feb 02 11:09:43 crc kubenswrapper[4838]: E0202 11:09:43.081535 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:f9bf288cd0c13912404027a58ea3b90d4092b641e8265adc5c88644ea7fe901a\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4d7p8" podUID="5e7863af-65e8-4d89-a434-fac6c13414cc"
Feb 02 11:09:45 crc kubenswrapper[4838]: E0202 11:09:45.319260 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:89f6fd332fabefd2fff5619432986b37c1c6d197dd1c510f21dfe4609939b8a6"
Feb 02 11:09:45 crc kubenswrapper[4838]: E0202 11:09:45.320352 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:89f6fd332fabefd2fff5619432986b37c1c6d197dd1c510f21dfe4609939b8a6,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-baremetal-operator-agent:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ANSIBLEEE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_EVALUATOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-evaluator:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_NOTIFIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-notifier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_APACHE_IMAGE_URL_DEFAULT,Value:registry.redhat.io/ubi9/httpd-24:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_KEYSTONE_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-keystone-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_IPMI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-ipmi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_MYSQLD_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/mysqld-exporter:v0.15.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_NOTIFICATION_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-notification:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_SGCORE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_BACKUP_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-backup:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_VOLUME_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-volume:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLOUDKITTY_API_IMAGE_URL_DEFAULT,Value:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLOUDKITTY_PROC_IMAGE_URL_DEFAULT,Value:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-processor:current,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_BACKENDBIND9_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-backend-bind9:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_MDNS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-mdns:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_PRODUCER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-producer:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_UNBOUND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-unbound:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_FRR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-frr:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_ISCSID_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-iscsid:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_KEPLER_IMAGE_URL_DEFAULT,Value:quay.io/sustainable_computing_io/kepler:release-0.7.12,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_LOGROTATE_CROND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cron:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_MULTIPATHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-multipathd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_DHCP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-dhcp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_METADATA_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_OVN_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-ovn-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_SRIOV_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-sriov-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NODE_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/node-exporter:v1.5.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_OVN_BGP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-bgp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_PODMAN_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/navidys/prometheus-podman-exporter:v1.10.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_GLANCE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_CFNAPI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api-cfn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HORIZON_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_MEMCACHED_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_REDIS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-redis:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_INSPECTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-inspector:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_NEUTRON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-neutron-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PXE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-pxe:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PYTHON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/ironic-python-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KEYSTONE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-keystone:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KSM_IMAGE_URL_DEFAULT,Value:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SHARE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-share:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MARIADB_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NET_UTILS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-netutils:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NEUTRON_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_NOVNC_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-novncproxy:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HEALTHMANAGER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-health-manager:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HOUSEKEEPING_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-housekeeping:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_RSYSLOG_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rsyslog:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_CLIENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_MUST_GATHER_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-must-gather:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_NETWORK_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OS_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/edpm-hardened-uefi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_OVS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NORTHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-northd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_SB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PLACEMENT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_RABBITMQ_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_ACCOUNT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-account:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-container:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_OBJECT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-object:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_PROXY_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_TEST_TEMPEST_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_APPLIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-applier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_DECISION_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-decision-engine:current-podified,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-d7svk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj_openstack-operators(0a8916a2-6c71-4678-9a42-23b82b72f891): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Feb 02 11:09:45 crc kubenswrapper[4838]: E0202 11:09:45.322411 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj" podUID="0a8916a2-6c71-4678-9a42-23b82b72f891"
Feb 02 11:09:45 crc kubenswrapper[4838]: I0202 11:09:45.430105 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 02 11:09:45 crc kubenswrapper[4838]: I0202 11:09:45.430177 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 11:09:46 crc kubenswrapper[4838]: I0202 11:09:46.108104 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b5m67" event={"ID":"07a77276-c90a-4b36-bfb6-3da9beccbb2f","Type":"ContainerStarted","Data":"ef3ce791bfe1d3fbba67670f7d2f12d309ac14a23644472f77bc3f4ed733046e"}
Feb 02 11:09:46 crc kubenswrapper[4838]: I0202 11:09:46.109560 4838 generic.go:334] "Generic (PLEG): container finished" podID="1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e" containerID="6260b2b6827ad8846830a0c67ec67da29c50b168f07b5e1c1cbf7d27e03e8a54" exitCode=0
Feb 02 11:09:46 crc kubenswrapper[4838]: I0202 11:09:46.109632 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hq8pt" event={"ID":"1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e","Type":"ContainerDied","Data":"6260b2b6827ad8846830a0c67ec67da29c50b168f07b5e1c1cbf7d27e03e8a54"}
Feb 02 11:09:46 crc kubenswrapper[4838]: I0202 11:09:46.114150 4838 generic.go:334] "Generic (PLEG): container finished" podID="62aeb885-450a-4bbe-9f57-1340be79abfe" containerID="4e5bbd7486b117703b9593468a72c8eb3121cd9fbc167c46f192fd3a4bcc8f60" exitCode=0
Feb 02 11:09:46 crc kubenswrapper[4838]: I0202 11:09:46.114352 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-49dkb" event={"ID":"62aeb885-450a-4bbe-9f57-1340be79abfe","Type":"ContainerDied","Data":"4e5bbd7486b117703b9593468a72c8eb3121cd9fbc167c46f192fd3a4bcc8f60"}
Feb 02 11:09:46 crc kubenswrapper[4838]: E0202 11:09:46.115664 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:89f6fd332fabefd2fff5619432986b37c1c6d197dd1c510f21dfe4609939b8a6\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj" podUID="0a8916a2-6c71-4678-9a42-23b82b72f891"
Feb 02 11:09:46 crc kubenswrapper[4838]: I0202 11:09:46.133378 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-b5m67" podStartSLOduration=3.221667277 podStartE2EDuration="1m9.133361626s" podCreationTimestamp="2026-02-02 11:08:37 +0000 UTC" firstStartedPulling="2026-02-02 11:08:39.177173106 +0000 UTC m=+913.514274134" lastFinishedPulling="2026-02-02 11:09:45.088867425 +0000 UTC m=+979.425968483" observedRunningTime="2026-02-02 11:09:46.1289278 +0000 UTC m=+980.466028838" watchObservedRunningTime="2026-02-02 11:09:46.133361626 +0000 UTC m=+980.470462654"
Feb 02 11:09:48 crc kubenswrapper[4838]: I0202 11:09:48.130084 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hq8pt" event={"ID":"1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e","Type":"ContainerStarted","Data":"f5a7531ba2b92550679adedb6b33990a2a0b26b3a0bde76f8e196332f23717a9"}
Feb 02 11:09:48 crc kubenswrapper[4838]: I0202 11:09:48.131915 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-79955696d6-thn2f" event={"ID":"5e0647d6-93ed-40f1-a522-f5ecf769dd14","Type":"ContainerStarted","Data":"a264bdb891cb7b131053978067c961ca38ac586ab1e44ffbc007db79ddc1cb62"}
Feb 02 11:09:48 crc kubenswrapper[4838]: I0202 11:09:48.132097 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-79955696d6-thn2f"
Feb 02 11:09:48 crc kubenswrapper[4838]: I0202 11:09:48.134355 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-49dkb" event={"ID":"62aeb885-450a-4bbe-9f57-1340be79abfe","Type":"ContainerStarted","Data":"1d60f365acc371de2bef46cb8e9c3e98a8b78fa0cb887bd34798bcbedd7d2282"}
Feb 02 11:09:48 crc kubenswrapper[4838]: I0202 11:09:48.151046 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hq8pt" podStartSLOduration=33.762133046 podStartE2EDuration="50.151025997s" podCreationTimestamp="2026-02-02 11:08:58 +0000 UTC" firstStartedPulling="2026-02-02 11:09:30.856856734 +0000 UTC m=+965.193957762" lastFinishedPulling="2026-02-02 11:09:47.245749685 +0000 UTC m=+981.582850713" observedRunningTime="2026-02-02 11:09:48.146030376 +0000 UTC m=+982.483131414" watchObservedRunningTime="2026-02-02 11:09:48.151025997 +0000 UTC m=+982.488127045"
Feb 02 11:09:48 crc kubenswrapper[4838]: I0202 11:09:48.165228 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-79955696d6-thn2f" podStartSLOduration=55.494287592 podStartE2EDuration="1m13.16520912s" podCreationTimestamp="2026-02-02 11:08:35 +0000 UTC" firstStartedPulling="2026-02-02 11:09:29.578278957 +0000 UTC m=+963.915379985" lastFinishedPulling="2026-02-02 11:09:47.249200485 +0000 UTC m=+981.586301513" observedRunningTime="2026-02-02 11:09:48.163863885 +0000 UTC m=+982.500964913" watchObservedRunningTime="2026-02-02 11:09:48.16520912 +0000 UTC m=+982.502310148"
Feb 02 11:09:48 crc kubenswrapper[4838]: I0202 11:09:48.197261 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-49dkb" podStartSLOduration=28.848668242 podStartE2EDuration="45.197235652s" podCreationTimestamp="2026-02-02 11:09:03 +0000 UTC" firstStartedPulling="2026-02-02 11:09:30.955287022 +0000 UTC m=+965.292388050" lastFinishedPulling="2026-02-02 11:09:47.303854432 +0000 UTC m=+981.640955460" observedRunningTime="2026-02-02 11:09:48.189678203 +0000 UTC m=+982.526779281" watchObservedRunningTime="2026-02-02 11:09:48.197235652 +0000 UTC m=+982.534336710"
Feb 02 11:09:48 crc kubenswrapper[4838]: I0202 11:09:48.303128 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-b5m67"
Feb 02 11:09:48 crc kubenswrapper[4838]: I0202 11:09:48.303181 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-b5m67"
Feb 02 11:09:48 crc kubenswrapper[4838]: I0202 11:09:48.351469 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-b5m67"
Feb 02 11:09:48 crc kubenswrapper[4838]: I0202 11:09:48.477862 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hq8pt"
Feb 02 11:09:48 crc kubenswrapper[4838]: I0202 11:09:48.478841 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hq8pt"
Feb 02 11:09:49 crc kubenswrapper[4838]: I0202 11:09:49.519110 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-hq8pt" podUID="1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e" containerName="registry-server" probeResult="failure" output=<
Feb 02 11:09:49 crc kubenswrapper[4838]: timeout: failed to connect service ":50051" within 1s
Feb 02 11:09:49 crc kubenswrapper[4838]: >
Feb 02 11:09:52 crc kubenswrapper[4838]: I0202 11:09:52.169567 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-79955696d6-thn2f"
Feb 02 11:09:53 crc kubenswrapper[4838]: I0202 11:09:53.914672 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-49dkb"
Feb 02 11:09:53 crc kubenswrapper[4838]: I0202 11:09:53.914976 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-49dkb"
Feb 02 11:09:53 crc kubenswrapper[4838]: I0202 11:09:53.956281 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-49dkb"
Feb 02 11:09:54 crc kubenswrapper[4838]: I0202 11:09:54.209386 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-49dkb"
Feb 02 11:09:54 crc kubenswrapper[4838]: I0202 11:09:54.251480 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-49dkb"]
Feb 02 11:09:55 crc kubenswrapper[4838]: I0202 11:09:55.179754 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-hp6js" event={"ID":"382292e1-fda7-4ab5-91e7-cf4ade4d6363","Type":"ContainerStarted","Data":"983fdef5a5292bbabce4cab758d91605929697f810a400aaf3f3fef748abe9a7"}
Feb 02 11:09:55 crc kubenswrapper[4838]: I0202 11:09:55.181181 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-hp6js"
Feb 02 11:09:55 crc kubenswrapper[4838]: I0202 11:09:55.200780 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-hp6js" podStartSLOduration=3.262353702 podStartE2EDuration="1m20.200760084s" podCreationTimestamp="2026-02-02 11:08:35 +0000 UTC" firstStartedPulling="2026-02-02 11:08:37.446036589 +0000 UTC m=+911.783137617" lastFinishedPulling="2026-02-02 11:09:54.384442971 +0000 UTC m=+988.721543999" observedRunningTime="2026-02-02 11:09:55.194325565 +0000 UTC m=+989.531426613" watchObservedRunningTime="2026-02-02 11:09:55.200760084 +0000 UTC m=+989.537861112"
Feb 02 11:09:56 crc kubenswrapper[4838]: I0202 11:09:56.186727 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-49dkb" podUID="62aeb885-450a-4bbe-9f57-1340be79abfe" containerName="registry-server" containerID="cri-o://1d60f365acc371de2bef46cb8e9c3e98a8b78fa0cb887bd34798bcbedd7d2282" gracePeriod=2
Feb 02 11:09:56 crc kubenswrapper[4838]: I0202 11:09:56.666120 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-49dkb"
Feb 02 11:09:56 crc kubenswrapper[4838]: I0202 11:09:56.817446 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62aeb885-450a-4bbe-9f57-1340be79abfe-catalog-content\") pod \"62aeb885-450a-4bbe-9f57-1340be79abfe\" (UID: \"62aeb885-450a-4bbe-9f57-1340be79abfe\") "
Feb 02 11:09:56 crc kubenswrapper[4838]: I0202 11:09:56.817503 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62aeb885-450a-4bbe-9f57-1340be79abfe-utilities\") pod \"62aeb885-450a-4bbe-9f57-1340be79abfe\" (UID: \"62aeb885-450a-4bbe-9f57-1340be79abfe\") "
Feb 02 11:09:56 crc kubenswrapper[4838]: I0202 11:09:56.817539 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8r7l\" (UniqueName: \"kubernetes.io/projected/62aeb885-450a-4bbe-9f57-1340be79abfe-kube-api-access-w8r7l\") pod \"62aeb885-450a-4bbe-9f57-1340be79abfe\" (UID: \"62aeb885-450a-4bbe-9f57-1340be79abfe\") "
Feb 02 11:09:56 crc kubenswrapper[4838]: I0202 11:09:56.818258 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62aeb885-450a-4bbe-9f57-1340be79abfe-utilities" (OuterVolumeSpecName: "utilities") pod "62aeb885-450a-4bbe-9f57-1340be79abfe" (UID: "62aeb885-450a-4bbe-9f57-1340be79abfe"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:09:56 crc kubenswrapper[4838]: I0202 11:09:56.822979 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62aeb885-450a-4bbe-9f57-1340be79abfe-kube-api-access-w8r7l" (OuterVolumeSpecName: "kube-api-access-w8r7l") pod "62aeb885-450a-4bbe-9f57-1340be79abfe" (UID: "62aeb885-450a-4bbe-9f57-1340be79abfe"). InnerVolumeSpecName "kube-api-access-w8r7l". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:09:56 crc kubenswrapper[4838]: I0202 11:09:56.873768 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62aeb885-450a-4bbe-9f57-1340be79abfe-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "62aeb885-450a-4bbe-9f57-1340be79abfe" (UID: "62aeb885-450a-4bbe-9f57-1340be79abfe"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:09:56 crc kubenswrapper[4838]: I0202 11:09:56.920167 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62aeb885-450a-4bbe-9f57-1340be79abfe-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 02 11:09:56 crc kubenswrapper[4838]: I0202 11:09:56.920202 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62aeb885-450a-4bbe-9f57-1340be79abfe-utilities\") on node \"crc\" DevicePath \"\""
Feb 02 11:09:56 crc kubenswrapper[4838]: I0202 11:09:56.920213 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8r7l\" (UniqueName: \"kubernetes.io/projected/62aeb885-450a-4bbe-9f57-1340be79abfe-kube-api-access-w8r7l\") on node \"crc\" DevicePath \"\""
Feb 02 11:09:57 crc kubenswrapper[4838]: I0202 11:09:57.194572 4838 generic.go:334] "Generic (PLEG): container finished" podID="62aeb885-450a-4bbe-9f57-1340be79abfe" containerID="1d60f365acc371de2bef46cb8e9c3e98a8b78fa0cb887bd34798bcbedd7d2282" exitCode=0
Feb 02 11:09:57 crc kubenswrapper[4838]: I0202 11:09:57.194649 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-49dkb"
Feb 02 11:09:57 crc kubenswrapper[4838]: I0202 11:09:57.194656 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-49dkb" event={"ID":"62aeb885-450a-4bbe-9f57-1340be79abfe","Type":"ContainerDied","Data":"1d60f365acc371de2bef46cb8e9c3e98a8b78fa0cb887bd34798bcbedd7d2282"}
Feb 02 11:09:57 crc kubenswrapper[4838]: I0202 11:09:57.194703 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-49dkb" event={"ID":"62aeb885-450a-4bbe-9f57-1340be79abfe","Type":"ContainerDied","Data":"a08f274556cda3fade17f747fce643652abb848da9963c28cf9822022ddf8872"}
Feb 02 11:09:57 crc kubenswrapper[4838]: I0202 11:09:57.194724 4838 scope.go:117] "RemoveContainer" containerID="1d60f365acc371de2bef46cb8e9c3e98a8b78fa0cb887bd34798bcbedd7d2282"
Feb 02 11:09:57 crc kubenswrapper[4838]: I0202 11:09:57.226500 4838 scope.go:117] "RemoveContainer" containerID="4e5bbd7486b117703b9593468a72c8eb3121cd9fbc167c46f192fd3a4bcc8f60"
Feb 02 11:09:57 crc kubenswrapper[4838]: I0202 11:09:57.233009 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-49dkb"]
Feb 02 11:09:57 crc kubenswrapper[4838]: I0202 11:09:57.238053 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-49dkb"]
Feb 02 11:09:57 crc kubenswrapper[4838]: I0202 11:09:57.253133 4838 scope.go:117] "RemoveContainer" containerID="6c54674655aa286cf632951e1b8c180c226d0715877d4340d484db4b4dfe7ec1"
Feb 02 11:09:57 crc kubenswrapper[4838]: I0202 11:09:57.272884 4838 scope.go:117] "RemoveContainer" containerID="1d60f365acc371de2bef46cb8e9c3e98a8b78fa0cb887bd34798bcbedd7d2282"
Feb 02 11:09:57 crc kubenswrapper[4838]: E0202 11:09:57.273432 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d60f365acc371de2bef46cb8e9c3e98a8b78fa0cb887bd34798bcbedd7d2282\": container with ID starting with 1d60f365acc371de2bef46cb8e9c3e98a8b78fa0cb887bd34798bcbedd7d2282 not found: ID does not exist" containerID="1d60f365acc371de2bef46cb8e9c3e98a8b78fa0cb887bd34798bcbedd7d2282"
Feb 02 11:09:57 crc kubenswrapper[4838]: I0202 11:09:57.273465 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d60f365acc371de2bef46cb8e9c3e98a8b78fa0cb887bd34798bcbedd7d2282"} err="failed to get container status \"1d60f365acc371de2bef46cb8e9c3e98a8b78fa0cb887bd34798bcbedd7d2282\": rpc error: code = NotFound desc = could not find container \"1d60f365acc371de2bef46cb8e9c3e98a8b78fa0cb887bd34798bcbedd7d2282\": container with ID starting with 1d60f365acc371de2bef46cb8e9c3e98a8b78fa0cb887bd34798bcbedd7d2282 not found: ID does not exist"
Feb 02 11:09:57 crc kubenswrapper[4838]: I0202 11:09:57.273488 4838 scope.go:117] "RemoveContainer" containerID="4e5bbd7486b117703b9593468a72c8eb3121cd9fbc167c46f192fd3a4bcc8f60"
Feb 02 11:09:57 crc kubenswrapper[4838]: E0202 11:09:57.273752 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e5bbd7486b117703b9593468a72c8eb3121cd9fbc167c46f192fd3a4bcc8f60\": container with ID starting with 4e5bbd7486b117703b9593468a72c8eb3121cd9fbc167c46f192fd3a4bcc8f60 not found: ID does not exist" containerID="4e5bbd7486b117703b9593468a72c8eb3121cd9fbc167c46f192fd3a4bcc8f60"
Feb 02 11:09:57 crc kubenswrapper[4838]: I0202 11:09:57.273777 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e5bbd7486b117703b9593468a72c8eb3121cd9fbc167c46f192fd3a4bcc8f60"} err="failed to get container status \"4e5bbd7486b117703b9593468a72c8eb3121cd9fbc167c46f192fd3a4bcc8f60\": rpc error: code = NotFound desc = could not find container \"4e5bbd7486b117703b9593468a72c8eb3121cd9fbc167c46f192fd3a4bcc8f60\": container with ID starting with 4e5bbd7486b117703b9593468a72c8eb3121cd9fbc167c46f192fd3a4bcc8f60 not found: ID does not exist"
Feb 02 11:09:57 crc kubenswrapper[4838]: I0202 11:09:57.273792 4838 scope.go:117] "RemoveContainer" containerID="6c54674655aa286cf632951e1b8c180c226d0715877d4340d484db4b4dfe7ec1"
Feb 02 11:09:57 crc kubenswrapper[4838]: E0202 11:09:57.274160 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c54674655aa286cf632951e1b8c180c226d0715877d4340d484db4b4dfe7ec1\": container with ID starting with 6c54674655aa286cf632951e1b8c180c226d0715877d4340d484db4b4dfe7ec1 not found: ID does not exist" containerID="6c54674655aa286cf632951e1b8c180c226d0715877d4340d484db4b4dfe7ec1"
Feb 02 11:09:57 crc kubenswrapper[4838]: I0202 11:09:57.274181 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c54674655aa286cf632951e1b8c180c226d0715877d4340d484db4b4dfe7ec1"} err="failed to get container status \"6c54674655aa286cf632951e1b8c180c226d0715877d4340d484db4b4dfe7ec1\": rpc error: code = NotFound desc = could not find container \"6c54674655aa286cf632951e1b8c180c226d0715877d4340d484db4b4dfe7ec1\": container with ID starting with 6c54674655aa286cf632951e1b8c180c226d0715877d4340d484db4b4dfe7ec1 not found: ID does not exist"
Feb 02 11:09:58 crc kubenswrapper[4838]: I0202 11:09:58.202054 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pljzb" event={"ID":"a158eb1e-69b8-48ad-8061-a3e503981572","Type":"ContainerStarted","Data":"7d2f2f5cd71761fe48fde20c585d8ecec362241b7d42b9a8fe1e323aca77766f"}
Feb 02 11:09:58 crc kubenswrapper[4838]: I0202 11:09:58.224075 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pljzb" podStartSLOduration=2.332312202 podStartE2EDuration="1m22.224051965s" podCreationTimestamp="2026-02-02 11:08:36 +0000 UTC" firstStartedPulling="2026-02-02 11:08:37.581905771 +0000 UTC m=+911.919006799" lastFinishedPulling="2026-02-02 11:09:57.473645514 +0000 UTC m=+991.810746562" observedRunningTime="2026-02-02 11:09:58.219228388 +0000 UTC m=+992.556329416" watchObservedRunningTime="2026-02-02 11:09:58.224051965 +0000 UTC m=+992.561153003"
Feb 02 11:09:58 crc kubenswrapper[4838]: I0202 11:09:58.350971 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-b5m67"
Feb 02 11:09:58 crc kubenswrapper[4838]: I0202 11:09:58.523257 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62aeb885-450a-4bbe-9f57-1340be79abfe" path="/var/lib/kubelet/pods/62aeb885-450a-4bbe-9f57-1340be79abfe/volumes"
Feb 02 11:09:58 crc kubenswrapper[4838]: I0202 11:09:58.532154 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hq8pt"
Feb 02 11:09:58 crc kubenswrapper[4838]: I0202 11:09:58.586609 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hq8pt"
Feb 02 11:10:00 crc kubenswrapper[4838]: I0202 11:10:00.597283 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b5m67"]
Feb 02 11:10:00 crc kubenswrapper[4838]: I0202 11:10:00.597572 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-b5m67" podUID="07a77276-c90a-4b36-bfb6-3da9beccbb2f" containerName="registry-server" containerID="cri-o://ef3ce791bfe1d3fbba67670f7d2f12d309ac14a23644472f77bc3f4ed733046e" gracePeriod=2
Feb 02 11:10:00 crc kubenswrapper[4838]: I0202 11:10:00.784427 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hq8pt"]
Feb 02 11:10:00 crc kubenswrapper[4838]: I0202 11:10:00.784665 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hq8pt" podUID="1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e" containerName="registry-server" containerID="cri-o://f5a7531ba2b92550679adedb6b33990a2a0b26b3a0bde76f8e196332f23717a9" gracePeriod=2
Feb 02 11:10:02 crc kubenswrapper[4838]: I0202 11:10:02.236317 4838 generic.go:334] "Generic (PLEG): container finished" podID="1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e" containerID="f5a7531ba2b92550679adedb6b33990a2a0b26b3a0bde76f8e196332f23717a9" exitCode=0
Feb 02 11:10:02 crc kubenswrapper[4838]: I0202 11:10:02.236421 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hq8pt" event={"ID":"1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e","Type":"ContainerDied","Data":"f5a7531ba2b92550679adedb6b33990a2a0b26b3a0bde76f8e196332f23717a9"}
Feb 02 11:10:02 crc kubenswrapper[4838]: I0202 11:10:02.251693 4838 generic.go:334] "Generic (PLEG): container finished" podID="07a77276-c90a-4b36-bfb6-3da9beccbb2f" containerID="ef3ce791bfe1d3fbba67670f7d2f12d309ac14a23644472f77bc3f4ed733046e" exitCode=0
Feb 02 11:10:02 crc kubenswrapper[4838]: I0202 11:10:02.251740 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b5m67" event={"ID":"07a77276-c90a-4b36-bfb6-3da9beccbb2f","Type":"ContainerDied","Data":"ef3ce791bfe1d3fbba67670f7d2f12d309ac14a23644472f77bc3f4ed733046e"}
Feb 02 11:10:02 crc kubenswrapper[4838]: I0202 11:10:02.543454 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hq8pt"
Feb 02 11:10:02 crc kubenswrapper[4838]: I0202 11:10:02.671413 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b5m67"
Feb 02 11:10:02 crc kubenswrapper[4838]: I0202 11:10:02.713040 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e-utilities\") pod \"1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e\" (UID: \"1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e\") "
Feb 02 11:10:02 crc kubenswrapper[4838]: I0202 11:10:02.713088 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e-catalog-content\") pod \"1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e\" (UID: \"1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e\") "
Feb 02 11:10:02 crc kubenswrapper[4838]: I0202 11:10:02.713201 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h6wkz\" (UniqueName: \"kubernetes.io/projected/1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e-kube-api-access-h6wkz\") pod \"1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e\" (UID: \"1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e\") "
Feb 02 11:10:02 crc kubenswrapper[4838]: I0202 11:10:02.714143 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e-utilities" (OuterVolumeSpecName: "utilities") pod "1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e" (UID: "1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:10:02 crc kubenswrapper[4838]: I0202 11:10:02.719760 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e-kube-api-access-h6wkz" (OuterVolumeSpecName: "kube-api-access-h6wkz") pod "1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e" (UID: "1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e"). InnerVolumeSpecName "kube-api-access-h6wkz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:10:02 crc kubenswrapper[4838]: I0202 11:10:02.769372 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e" (UID: "1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:10:02 crc kubenswrapper[4838]: I0202 11:10:02.814964 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m4779\" (UniqueName: \"kubernetes.io/projected/07a77276-c90a-4b36-bfb6-3da9beccbb2f-kube-api-access-m4779\") pod \"07a77276-c90a-4b36-bfb6-3da9beccbb2f\" (UID: \"07a77276-c90a-4b36-bfb6-3da9beccbb2f\") "
Feb 02 11:10:02 crc kubenswrapper[4838]: I0202 11:10:02.815070 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07a77276-c90a-4b36-bfb6-3da9beccbb2f-catalog-content\") pod \"07a77276-c90a-4b36-bfb6-3da9beccbb2f\" (UID: \"07a77276-c90a-4b36-bfb6-3da9beccbb2f\") "
Feb 02 11:10:02 crc kubenswrapper[4838]: I0202 11:10:02.815131 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07a77276-c90a-4b36-bfb6-3da9beccbb2f-utilities\") pod \"07a77276-c90a-4b36-bfb6-3da9beccbb2f\" (UID: \"07a77276-c90a-4b36-bfb6-3da9beccbb2f\") "
Feb 02 11:10:02 crc kubenswrapper[4838]: I0202 11:10:02.815443 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e-utilities\") on node \"crc\" DevicePath \"\""
Feb 02 11:10:02 crc kubenswrapper[4838]: I0202 11:10:02.815469 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 02 11:10:02 crc kubenswrapper[4838]: I0202 11:10:02.815482 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h6wkz\" (UniqueName: \"kubernetes.io/projected/1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e-kube-api-access-h6wkz\") on node \"crc\" DevicePath \"\""
Feb 02 11:10:02 crc kubenswrapper[4838]: I0202 11:10:02.816442 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07a77276-c90a-4b36-bfb6-3da9beccbb2f-utilities" (OuterVolumeSpecName: "utilities") pod "07a77276-c90a-4b36-bfb6-3da9beccbb2f" (UID: "07a77276-c90a-4b36-bfb6-3da9beccbb2f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:10:02 crc kubenswrapper[4838]: I0202 11:10:02.820369 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07a77276-c90a-4b36-bfb6-3da9beccbb2f-kube-api-access-m4779" (OuterVolumeSpecName: "kube-api-access-m4779") pod "07a77276-c90a-4b36-bfb6-3da9beccbb2f" (UID: "07a77276-c90a-4b36-bfb6-3da9beccbb2f"). InnerVolumeSpecName "kube-api-access-m4779". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:10:02 crc kubenswrapper[4838]: I0202 11:10:02.836420 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07a77276-c90a-4b36-bfb6-3da9beccbb2f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "07a77276-c90a-4b36-bfb6-3da9beccbb2f" (UID: "07a77276-c90a-4b36-bfb6-3da9beccbb2f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:10:02 crc kubenswrapper[4838]: I0202 11:10:02.916304 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m4779\" (UniqueName: \"kubernetes.io/projected/07a77276-c90a-4b36-bfb6-3da9beccbb2f-kube-api-access-m4779\") on node \"crc\" DevicePath \"\""
Feb 02 11:10:02 crc kubenswrapper[4838]: I0202 11:10:02.916355 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07a77276-c90a-4b36-bfb6-3da9beccbb2f-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 02 11:10:02 crc kubenswrapper[4838]: I0202 11:10:02.916372 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07a77276-c90a-4b36-bfb6-3da9beccbb2f-utilities\") on node \"crc\" DevicePath \"\""
Feb 02 11:10:03 crc kubenswrapper[4838]: I0202 11:10:03.259359 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-8l8mb" event={"ID":"d960011d-30b7-4eb4-9e06-1b8b9aa0a114","Type":"ContainerStarted","Data":"029875a7559c2993015c56b72895d04347bb97dccbf0de91964b69586223a137"}
Feb 02 11:10:03 crc kubenswrapper[4838]: I0202 11:10:03.259563 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-8l8mb"
Feb 02 11:10:03 crc kubenswrapper[4838]: I0202 11:10:03.262101 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b5m67" event={"ID":"07a77276-c90a-4b36-bfb6-3da9beccbb2f","Type":"ContainerDied","Data":"5eedc88f20c36ee3bcddce6d9835ea06f84a54b7f5557ca0bdaab0229eb58977"}
Feb 02 11:10:03 crc kubenswrapper[4838]: I0202 11:10:03.262142 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b5m67"
Feb 02 11:10:03 crc kubenswrapper[4838]: I0202 11:10:03.262177 4838 scope.go:117] "RemoveContainer" containerID="ef3ce791bfe1d3fbba67670f7d2f12d309ac14a23644472f77bc3f4ed733046e"
Feb 02 11:10:03 crc kubenswrapper[4838]: I0202 11:10:03.264769 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hq8pt" event={"ID":"1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e","Type":"ContainerDied","Data":"fceaf9c359c0fc4a71ae1fe2f289c67380b4c6ad286f3c6957e63f5a943e7cc1"}
Feb 02 11:10:03 crc kubenswrapper[4838]: I0202 11:10:03.264788 4838 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/community-operators-hq8pt" Feb 02 11:10:03 crc kubenswrapper[4838]: I0202 11:10:03.266868 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4d7p8" event={"ID":"5e7863af-65e8-4d89-a434-fac6c13414cc","Type":"ContainerStarted","Data":"2787ac1a4b5933e6ab5f39f0189aa213b4b650f8c623ae94046d0abe3901e33b"} Feb 02 11:10:03 crc kubenswrapper[4838]: I0202 11:10:03.267371 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4d7p8" Feb 02 11:10:03 crc kubenswrapper[4838]: I0202 11:10:03.269491 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj" event={"ID":"0a8916a2-6c71-4678-9a42-23b82b72f891","Type":"ContainerStarted","Data":"1eccbc3972f4a1d5f28b74d178e399bf2f8f6a6ccea4019920221279aea10cf1"} Feb 02 11:10:03 crc kubenswrapper[4838]: I0202 11:10:03.269824 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj" Feb 02 11:10:03 crc kubenswrapper[4838]: I0202 11:10:03.291874 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-8l8mb" podStartSLOduration=3.743116102 podStartE2EDuration="1m28.291854501s" podCreationTimestamp="2026-02-02 11:08:35 +0000 UTC" firstStartedPulling="2026-02-02 11:08:37.486234106 +0000 UTC m=+911.823335134" lastFinishedPulling="2026-02-02 11:10:02.034972495 +0000 UTC m=+996.372073533" observedRunningTime="2026-02-02 11:10:03.28609842 +0000 UTC m=+997.623199478" watchObservedRunningTime="2026-02-02 11:10:03.291854501 +0000 UTC m=+997.628955539" Feb 02 11:10:03 crc kubenswrapper[4838]: I0202 11:10:03.319397 4838 scope.go:117] "RemoveContainer" containerID="5fb3480d1821dbdd842edfe43e8224bea319e2f1178a65a07442f093a8f99bc7" Feb 02 11:10:03 crc kubenswrapper[4838]: I0202 11:10:03.342137 4838 scope.go:117] "RemoveContainer" containerID="7de9a63967a263a87ac401cd2982cf61d02aed029f2357e0485183c8b58c06a8" Feb 02 11:10:03 crc kubenswrapper[4838]: I0202 11:10:03.343236 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj" podStartSLOduration=55.067354555 podStartE2EDuration="1m28.343222322s" podCreationTimestamp="2026-02-02 11:08:35 +0000 UTC" firstStartedPulling="2026-02-02 11:09:28.962097106 +0000 UTC m=+963.299198144" lastFinishedPulling="2026-02-02 11:10:02.237964873 +0000 UTC m=+996.575065911" observedRunningTime="2026-02-02 11:10:03.31881534 +0000 UTC m=+997.655916368" watchObservedRunningTime="2026-02-02 11:10:03.343222322 +0000 UTC m=+997.680323370" Feb 02 11:10:03 crc kubenswrapper[4838]: I0202 11:10:03.350895 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4d7p8" podStartSLOduration=2.797878362 podStartE2EDuration="1m27.350875293s" podCreationTimestamp="2026-02-02 11:08:36 +0000 UTC" firstStartedPulling="2026-02-02 11:08:37.484402548 +0000 UTC m=+911.821503576" lastFinishedPulling="2026-02-02 11:10:02.037399479 +0000 UTC m=+996.374500507" observedRunningTime="2026-02-02 11:10:03.341499447 +0000 UTC m=+997.678600485" watchObservedRunningTime="2026-02-02 11:10:03.350875293 +0000 UTC 
m=+997.687976341" Feb 02 11:10:03 crc kubenswrapper[4838]: I0202 11:10:03.361002 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b5m67"] Feb 02 11:10:03 crc kubenswrapper[4838]: I0202 11:10:03.367404 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-b5m67"] Feb 02 11:10:03 crc kubenswrapper[4838]: I0202 11:10:03.384106 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hq8pt"] Feb 02 11:10:03 crc kubenswrapper[4838]: I0202 11:10:03.392712 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hq8pt"] Feb 02 11:10:03 crc kubenswrapper[4838]: I0202 11:10:03.384723 4838 scope.go:117] "RemoveContainer" containerID="f5a7531ba2b92550679adedb6b33990a2a0b26b3a0bde76f8e196332f23717a9" Feb 02 11:10:03 crc kubenswrapper[4838]: I0202 11:10:03.411643 4838 scope.go:117] "RemoveContainer" containerID="6260b2b6827ad8846830a0c67ec67da29c50b168f07b5e1c1cbf7d27e03e8a54" Feb 02 11:10:03 crc kubenswrapper[4838]: I0202 11:10:03.430801 4838 scope.go:117] "RemoveContainer" containerID="714e19b74e4e7438b58836254eb35c77d4dfdb9b66f31d71bf606b13148368fb" Feb 02 11:10:04 crc kubenswrapper[4838]: I0202 11:10:04.521358 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07a77276-c90a-4b36-bfb6-3da9beccbb2f" path="/var/lib/kubelet/pods/07a77276-c90a-4b36-bfb6-3da9beccbb2f/volumes" Feb 02 11:10:04 crc kubenswrapper[4838]: I0202 11:10:04.522985 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e" path="/var/lib/kubelet/pods/1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e/volumes" Feb 02 11:10:06 crc kubenswrapper[4838]: I0202 11:10:06.543167 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-hp6js" Feb 02 11:10:12 crc kubenswrapper[4838]: I0202 11:10:12.219141 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj" Feb 02 11:10:15 crc kubenswrapper[4838]: I0202 11:10:15.943020 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 11:10:15 crc kubenswrapper[4838]: I0202 11:10:15.943075 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 11:10:15 crc kubenswrapper[4838]: I0202 11:10:15.943119 4838 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" Feb 02 11:10:15 crc kubenswrapper[4838]: I0202 11:10:15.944987 4838 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"411e6aa6542cc291703765b915acdf4b4838b2ed95b8455f8ee0b804a9cfdae7"} pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" containerMessage="Container machine-config-daemon failed liveness 
probe, will be restarted" Feb 02 11:10:15 crc kubenswrapper[4838]: I0202 11:10:15.945065 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" containerID="cri-o://411e6aa6542cc291703765b915acdf4b4838b2ed95b8455f8ee0b804a9cfdae7" gracePeriod=600 Feb 02 11:10:16 crc kubenswrapper[4838]: I0202 11:10:16.671891 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-8l8mb" Feb 02 11:10:16 crc kubenswrapper[4838]: I0202 11:10:16.722509 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-4d7p8" Feb 02 11:10:16 crc kubenswrapper[4838]: I0202 11:10:16.964168 4838 generic.go:334] "Generic (PLEG): container finished" podID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerID="411e6aa6542cc291703765b915acdf4b4838b2ed95b8455f8ee0b804a9cfdae7" exitCode=0 Feb 02 11:10:16 crc kubenswrapper[4838]: I0202 11:10:16.964230 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" event={"ID":"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce","Type":"ContainerDied","Data":"411e6aa6542cc291703765b915acdf4b4838b2ed95b8455f8ee0b804a9cfdae7"} Feb 02 11:10:16 crc kubenswrapper[4838]: I0202 11:10:16.964275 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" event={"ID":"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce","Type":"ContainerStarted","Data":"34c7a9cc4d8fb6168afba32d2440c7d9ab6f69f8c80d4ae7f515c16fdb162626"} Feb 02 11:10:16 crc kubenswrapper[4838]: I0202 11:10:16.964298 4838 scope.go:117] "RemoveContainer" containerID="47f84caf6e841371c2d1b572818b4b359f4d7377669649aa2f737bf7eb7b98db" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.157577 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-92wp6"] Feb 02 11:10:33 crc kubenswrapper[4838]: E0202 11:10:33.159778 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07a77276-c90a-4b36-bfb6-3da9beccbb2f" containerName="extract-content" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.159795 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="07a77276-c90a-4b36-bfb6-3da9beccbb2f" containerName="extract-content" Feb 02 11:10:33 crc kubenswrapper[4838]: E0202 11:10:33.159803 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62aeb885-450a-4bbe-9f57-1340be79abfe" containerName="registry-server" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.159809 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="62aeb885-450a-4bbe-9f57-1340be79abfe" containerName="registry-server" Feb 02 11:10:33 crc kubenswrapper[4838]: E0202 11:10:33.159818 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07a77276-c90a-4b36-bfb6-3da9beccbb2f" containerName="registry-server" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.159825 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="07a77276-c90a-4b36-bfb6-3da9beccbb2f" containerName="registry-server" Feb 02 11:10:33 crc kubenswrapper[4838]: E0202 11:10:33.159836 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62aeb885-450a-4bbe-9f57-1340be79abfe" containerName="extract-utilities" Feb 02 11:10:33 crc 
kubenswrapper[4838]: I0202 11:10:33.159842 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="62aeb885-450a-4bbe-9f57-1340be79abfe" containerName="extract-utilities" Feb 02 11:10:33 crc kubenswrapper[4838]: E0202 11:10:33.159857 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e" containerName="registry-server" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.159863 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e" containerName="registry-server" Feb 02 11:10:33 crc kubenswrapper[4838]: E0202 11:10:33.159873 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07a77276-c90a-4b36-bfb6-3da9beccbb2f" containerName="extract-utilities" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.159880 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="07a77276-c90a-4b36-bfb6-3da9beccbb2f" containerName="extract-utilities" Feb 02 11:10:33 crc kubenswrapper[4838]: E0202 11:10:33.159887 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e" containerName="extract-utilities" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.159892 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e" containerName="extract-utilities" Feb 02 11:10:33 crc kubenswrapper[4838]: E0202 11:10:33.159903 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e" containerName="extract-content" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.159908 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e" containerName="extract-content" Feb 02 11:10:33 crc kubenswrapper[4838]: E0202 11:10:33.159916 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62aeb885-450a-4bbe-9f57-1340be79abfe" containerName="extract-content" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.159921 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="62aeb885-450a-4bbe-9f57-1340be79abfe" containerName="extract-content" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.160049 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c4c8d2c-2fd0-4ca4-b438-15151a61fe1e" containerName="registry-server" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.160059 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="62aeb885-450a-4bbe-9f57-1340be79abfe" containerName="registry-server" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.160079 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="07a77276-c90a-4b36-bfb6-3da9beccbb2f" containerName="registry-server" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.160841 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-92wp6" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.164304 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.164528 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.164725 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.164755 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-cmwht" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.168295 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-92wp6"] Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.255442 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82d26ae8-23be-4861-9384-59cb3e20640d-config\") pod \"dnsmasq-dns-675f4bcbfc-92wp6\" (UID: \"82d26ae8-23be-4861-9384-59cb3e20640d\") " pod="openstack/dnsmasq-dns-675f4bcbfc-92wp6" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.255509 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rhhp\" (UniqueName: \"kubernetes.io/projected/82d26ae8-23be-4861-9384-59cb3e20640d-kube-api-access-9rhhp\") pod \"dnsmasq-dns-675f4bcbfc-92wp6\" (UID: \"82d26ae8-23be-4861-9384-59cb3e20640d\") " pod="openstack/dnsmasq-dns-675f4bcbfc-92wp6" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.269667 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-5lnnj"] Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.271037 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-5lnnj" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.273334 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.284601 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-5lnnj"] Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.356676 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rhhp\" (UniqueName: \"kubernetes.io/projected/82d26ae8-23be-4861-9384-59cb3e20640d-kube-api-access-9rhhp\") pod \"dnsmasq-dns-675f4bcbfc-92wp6\" (UID: \"82d26ae8-23be-4861-9384-59cb3e20640d\") " pod="openstack/dnsmasq-dns-675f4bcbfc-92wp6" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.356793 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nr7f7\" (UniqueName: \"kubernetes.io/projected/b9ae6407-ad21-4b11-9e2c-ebd7441c5e96-kube-api-access-nr7f7\") pod \"dnsmasq-dns-78dd6ddcc-5lnnj\" (UID: \"b9ae6407-ad21-4b11-9e2c-ebd7441c5e96\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5lnnj" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.356867 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82d26ae8-23be-4861-9384-59cb3e20640d-config\") pod \"dnsmasq-dns-675f4bcbfc-92wp6\" (UID: \"82d26ae8-23be-4861-9384-59cb3e20640d\") " pod="openstack/dnsmasq-dns-675f4bcbfc-92wp6" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.356882 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b9ae6407-ad21-4b11-9e2c-ebd7441c5e96-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-5lnnj\" (UID: \"b9ae6407-ad21-4b11-9e2c-ebd7441c5e96\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5lnnj" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.356918 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9ae6407-ad21-4b11-9e2c-ebd7441c5e96-config\") pod \"dnsmasq-dns-78dd6ddcc-5lnnj\" (UID: \"b9ae6407-ad21-4b11-9e2c-ebd7441c5e96\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5lnnj" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.358210 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82d26ae8-23be-4861-9384-59cb3e20640d-config\") pod \"dnsmasq-dns-675f4bcbfc-92wp6\" (UID: \"82d26ae8-23be-4861-9384-59cb3e20640d\") " pod="openstack/dnsmasq-dns-675f4bcbfc-92wp6" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.376604 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rhhp\" (UniqueName: \"kubernetes.io/projected/82d26ae8-23be-4861-9384-59cb3e20640d-kube-api-access-9rhhp\") pod \"dnsmasq-dns-675f4bcbfc-92wp6\" (UID: \"82d26ae8-23be-4861-9384-59cb3e20640d\") " pod="openstack/dnsmasq-dns-675f4bcbfc-92wp6" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.457665 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nr7f7\" (UniqueName: \"kubernetes.io/projected/b9ae6407-ad21-4b11-9e2c-ebd7441c5e96-kube-api-access-nr7f7\") pod \"dnsmasq-dns-78dd6ddcc-5lnnj\" (UID: \"b9ae6407-ad21-4b11-9e2c-ebd7441c5e96\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5lnnj" Feb 02 11:10:33 crc 
kubenswrapper[4838]: I0202 11:10:33.457762 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b9ae6407-ad21-4b11-9e2c-ebd7441c5e96-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-5lnnj\" (UID: \"b9ae6407-ad21-4b11-9e2c-ebd7441c5e96\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5lnnj" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.457786 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9ae6407-ad21-4b11-9e2c-ebd7441c5e96-config\") pod \"dnsmasq-dns-78dd6ddcc-5lnnj\" (UID: \"b9ae6407-ad21-4b11-9e2c-ebd7441c5e96\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5lnnj" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.458734 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9ae6407-ad21-4b11-9e2c-ebd7441c5e96-config\") pod \"dnsmasq-dns-78dd6ddcc-5lnnj\" (UID: \"b9ae6407-ad21-4b11-9e2c-ebd7441c5e96\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5lnnj" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.458777 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b9ae6407-ad21-4b11-9e2c-ebd7441c5e96-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-5lnnj\" (UID: \"b9ae6407-ad21-4b11-9e2c-ebd7441c5e96\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5lnnj" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.473394 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nr7f7\" (UniqueName: \"kubernetes.io/projected/b9ae6407-ad21-4b11-9e2c-ebd7441c5e96-kube-api-access-nr7f7\") pod \"dnsmasq-dns-78dd6ddcc-5lnnj\" (UID: \"b9ae6407-ad21-4b11-9e2c-ebd7441c5e96\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5lnnj" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.481065 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-92wp6" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.632822 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-5lnnj" Feb 02 11:10:33 crc kubenswrapper[4838]: I0202 11:10:33.946219 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-92wp6"] Feb 02 11:10:34 crc kubenswrapper[4838]: I0202 11:10:34.053783 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-5lnnj"] Feb 02 11:10:34 crc kubenswrapper[4838]: W0202 11:10:34.060024 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9ae6407_ad21_4b11_9e2c_ebd7441c5e96.slice/crio-0352ce175f716fd85672adb4928a150d1da6077d2abb4c17fb67b6d817130dc7 WatchSource:0}: Error finding container 0352ce175f716fd85672adb4928a150d1da6077d2abb4c17fb67b6d817130dc7: Status 404 returned error can't find the container with id 0352ce175f716fd85672adb4928a150d1da6077d2abb4c17fb67b6d817130dc7 Feb 02 11:10:34 crc kubenswrapper[4838]: I0202 11:10:34.126810 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-5lnnj" event={"ID":"b9ae6407-ad21-4b11-9e2c-ebd7441c5e96","Type":"ContainerStarted","Data":"0352ce175f716fd85672adb4928a150d1da6077d2abb4c17fb67b6d817130dc7"} Feb 02 11:10:34 crc kubenswrapper[4838]: I0202 11:10:34.131433 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-92wp6" event={"ID":"82d26ae8-23be-4861-9384-59cb3e20640d","Type":"ContainerStarted","Data":"565f98449556fad39e3d6f947cf9a70a31c94077dec3144d0af77068df5f0943"} Feb 02 11:10:35 crc kubenswrapper[4838]: I0202 11:10:35.953054 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-92wp6"] Feb 02 11:10:35 crc kubenswrapper[4838]: I0202 11:10:35.994807 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-7d89g"] Feb 02 11:10:35 crc kubenswrapper[4838]: I0202 11:10:35.996093 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-7d89g" Feb 02 11:10:36 crc kubenswrapper[4838]: I0202 11:10:36.003090 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-7d89g"] Feb 02 11:10:36 crc kubenswrapper[4838]: I0202 11:10:36.113774 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/093bf62e-0c85-4d63-8dd4-4003b7f20122-dns-svc\") pod \"dnsmasq-dns-666b6646f7-7d89g\" (UID: \"093bf62e-0c85-4d63-8dd4-4003b7f20122\") " pod="openstack/dnsmasq-dns-666b6646f7-7d89g" Feb 02 11:10:36 crc kubenswrapper[4838]: I0202 11:10:36.113840 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8pkv\" (UniqueName: \"kubernetes.io/projected/093bf62e-0c85-4d63-8dd4-4003b7f20122-kube-api-access-n8pkv\") pod \"dnsmasq-dns-666b6646f7-7d89g\" (UID: \"093bf62e-0c85-4d63-8dd4-4003b7f20122\") " pod="openstack/dnsmasq-dns-666b6646f7-7d89g" Feb 02 11:10:36 crc kubenswrapper[4838]: I0202 11:10:36.113898 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/093bf62e-0c85-4d63-8dd4-4003b7f20122-config\") pod \"dnsmasq-dns-666b6646f7-7d89g\" (UID: \"093bf62e-0c85-4d63-8dd4-4003b7f20122\") " pod="openstack/dnsmasq-dns-666b6646f7-7d89g" Feb 02 11:10:36 crc kubenswrapper[4838]: I0202 11:10:36.214972 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/093bf62e-0c85-4d63-8dd4-4003b7f20122-dns-svc\") pod \"dnsmasq-dns-666b6646f7-7d89g\" (UID: \"093bf62e-0c85-4d63-8dd4-4003b7f20122\") " pod="openstack/dnsmasq-dns-666b6646f7-7d89g" Feb 02 11:10:36 crc kubenswrapper[4838]: I0202 11:10:36.215048 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8pkv\" (UniqueName: \"kubernetes.io/projected/093bf62e-0c85-4d63-8dd4-4003b7f20122-kube-api-access-n8pkv\") pod \"dnsmasq-dns-666b6646f7-7d89g\" (UID: \"093bf62e-0c85-4d63-8dd4-4003b7f20122\") " pod="openstack/dnsmasq-dns-666b6646f7-7d89g" Feb 02 11:10:36 crc kubenswrapper[4838]: I0202 11:10:36.215106 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/093bf62e-0c85-4d63-8dd4-4003b7f20122-config\") pod \"dnsmasq-dns-666b6646f7-7d89g\" (UID: \"093bf62e-0c85-4d63-8dd4-4003b7f20122\") " pod="openstack/dnsmasq-dns-666b6646f7-7d89g" Feb 02 11:10:36 crc kubenswrapper[4838]: I0202 11:10:36.216258 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/093bf62e-0c85-4d63-8dd4-4003b7f20122-config\") pod \"dnsmasq-dns-666b6646f7-7d89g\" (UID: \"093bf62e-0c85-4d63-8dd4-4003b7f20122\") " pod="openstack/dnsmasq-dns-666b6646f7-7d89g" Feb 02 11:10:36 crc kubenswrapper[4838]: I0202 11:10:36.216547 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/093bf62e-0c85-4d63-8dd4-4003b7f20122-dns-svc\") pod \"dnsmasq-dns-666b6646f7-7d89g\" (UID: \"093bf62e-0c85-4d63-8dd4-4003b7f20122\") " pod="openstack/dnsmasq-dns-666b6646f7-7d89g" Feb 02 11:10:36 crc kubenswrapper[4838]: I0202 11:10:36.227083 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-5lnnj"] Feb 02 11:10:36 crc kubenswrapper[4838]: I0202 11:10:36.249321 
4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-kf66g"] Feb 02 11:10:36 crc kubenswrapper[4838]: I0202 11:10:36.250439 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-kf66g" Feb 02 11:10:36 crc kubenswrapper[4838]: I0202 11:10:36.265522 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-kf66g"] Feb 02 11:10:36 crc kubenswrapper[4838]: I0202 11:10:36.278172 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8pkv\" (UniqueName: \"kubernetes.io/projected/093bf62e-0c85-4d63-8dd4-4003b7f20122-kube-api-access-n8pkv\") pod \"dnsmasq-dns-666b6646f7-7d89g\" (UID: \"093bf62e-0c85-4d63-8dd4-4003b7f20122\") " pod="openstack/dnsmasq-dns-666b6646f7-7d89g" Feb 02 11:10:36 crc kubenswrapper[4838]: I0202 11:10:36.316368 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxvs8\" (UniqueName: \"kubernetes.io/projected/6ef67aca-5d1e-42eb-a108-ed5db869c6fe-kube-api-access-wxvs8\") pod \"dnsmasq-dns-57d769cc4f-kf66g\" (UID: \"6ef67aca-5d1e-42eb-a108-ed5db869c6fe\") " pod="openstack/dnsmasq-dns-57d769cc4f-kf66g" Feb 02 11:10:36 crc kubenswrapper[4838]: I0202 11:10:36.316499 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6ef67aca-5d1e-42eb-a108-ed5db869c6fe-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-kf66g\" (UID: \"6ef67aca-5d1e-42eb-a108-ed5db869c6fe\") " pod="openstack/dnsmasq-dns-57d769cc4f-kf66g" Feb 02 11:10:36 crc kubenswrapper[4838]: I0202 11:10:36.316538 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ef67aca-5d1e-42eb-a108-ed5db869c6fe-config\") pod \"dnsmasq-dns-57d769cc4f-kf66g\" (UID: \"6ef67aca-5d1e-42eb-a108-ed5db869c6fe\") " pod="openstack/dnsmasq-dns-57d769cc4f-kf66g" Feb 02 11:10:36 crc kubenswrapper[4838]: I0202 11:10:36.323869 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-7d89g" Feb 02 11:10:36 crc kubenswrapper[4838]: I0202 11:10:36.420182 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxvs8\" (UniqueName: \"kubernetes.io/projected/6ef67aca-5d1e-42eb-a108-ed5db869c6fe-kube-api-access-wxvs8\") pod \"dnsmasq-dns-57d769cc4f-kf66g\" (UID: \"6ef67aca-5d1e-42eb-a108-ed5db869c6fe\") " pod="openstack/dnsmasq-dns-57d769cc4f-kf66g" Feb 02 11:10:36 crc kubenswrapper[4838]: I0202 11:10:36.420313 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6ef67aca-5d1e-42eb-a108-ed5db869c6fe-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-kf66g\" (UID: \"6ef67aca-5d1e-42eb-a108-ed5db869c6fe\") " pod="openstack/dnsmasq-dns-57d769cc4f-kf66g" Feb 02 11:10:36 crc kubenswrapper[4838]: I0202 11:10:36.420351 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ef67aca-5d1e-42eb-a108-ed5db869c6fe-config\") pod \"dnsmasq-dns-57d769cc4f-kf66g\" (UID: \"6ef67aca-5d1e-42eb-a108-ed5db869c6fe\") " pod="openstack/dnsmasq-dns-57d769cc4f-kf66g" Feb 02 11:10:36 crc kubenswrapper[4838]: I0202 11:10:36.426964 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ef67aca-5d1e-42eb-a108-ed5db869c6fe-config\") pod \"dnsmasq-dns-57d769cc4f-kf66g\" (UID: \"6ef67aca-5d1e-42eb-a108-ed5db869c6fe\") " pod="openstack/dnsmasq-dns-57d769cc4f-kf66g" Feb 02 11:10:36 crc kubenswrapper[4838]: I0202 11:10:36.427831 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6ef67aca-5d1e-42eb-a108-ed5db869c6fe-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-kf66g\" (UID: \"6ef67aca-5d1e-42eb-a108-ed5db869c6fe\") " pod="openstack/dnsmasq-dns-57d769cc4f-kf66g" Feb 02 11:10:36 crc kubenswrapper[4838]: I0202 11:10:36.446963 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxvs8\" (UniqueName: \"kubernetes.io/projected/6ef67aca-5d1e-42eb-a108-ed5db869c6fe-kube-api-access-wxvs8\") pod \"dnsmasq-dns-57d769cc4f-kf66g\" (UID: \"6ef67aca-5d1e-42eb-a108-ed5db869c6fe\") " pod="openstack/dnsmasq-dns-57d769cc4f-kf66g" Feb 02 11:10:36 crc kubenswrapper[4838]: I0202 11:10:36.606943 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-kf66g" Feb 02 11:10:36 crc kubenswrapper[4838]: I0202 11:10:36.823485 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-7d89g"] Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.086973 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-kf66g"] Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.111414 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.114729 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.117445 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.118212 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-wr5z9" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.118409 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.118548 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.118889 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.118921 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.119119 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.126587 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.231177 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.231215 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.231270 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.231362 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.231425 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.231450 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-config-data\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.231474 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.231494 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.231563 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.231682 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.231718 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bn88t\" (UniqueName: \"kubernetes.io/projected/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-kube-api-access-bn88t\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.352559 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.352610 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.352671 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.352707 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: 
\"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.354238 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bn88t\" (UniqueName: \"kubernetes.io/projected/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-kube-api-access-bn88t\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.353414 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.354190 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.353520 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.354327 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.354376 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.354407 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.354437 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.354481 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.354515 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-config-data\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.354881 4838 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.355598 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.356259 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-config-data\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.359153 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.359872 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.369337 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.380318 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.381835 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.383886 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.384787 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.388811 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bn88t\" (UniqueName: \"kubernetes.io/projected/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-kube-api-access-bn88t\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.389834 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.389989 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.389876 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.390396 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.390735 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.391075 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-25g28" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.391221 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") " pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.399468 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.440763 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.458885 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.458939 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/10f55730-6ea0-4989-a006-b0549f5566a7-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.458976 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/10f55730-6ea0-4989-a006-b0549f5566a7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.459141 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/10f55730-6ea0-4989-a006-b0549f5566a7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.459207 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/10f55730-6ea0-4989-a006-b0549f5566a7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.459236 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/10f55730-6ea0-4989-a006-b0549f5566a7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.459274 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/10f55730-6ea0-4989-a006-b0549f5566a7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.459322 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmtv6\" (UniqueName: \"kubernetes.io/projected/10f55730-6ea0-4989-a006-b0549f5566a7-kube-api-access-rmtv6\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.459438 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/10f55730-6ea0-4989-a006-b0549f5566a7-plugins-conf\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.459473 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/10f55730-6ea0-4989-a006-b0549f5566a7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.459496 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/10f55730-6ea0-4989-a006-b0549f5566a7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.562542 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.562585 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/10f55730-6ea0-4989-a006-b0549f5566a7-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.562611 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/10f55730-6ea0-4989-a006-b0549f5566a7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.562656 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/10f55730-6ea0-4989-a006-b0549f5566a7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.562688 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/10f55730-6ea0-4989-a006-b0549f5566a7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.562706 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/10f55730-6ea0-4989-a006-b0549f5566a7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.562723 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/10f55730-6ea0-4989-a006-b0549f5566a7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 
11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.562745 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmtv6\" (UniqueName: \"kubernetes.io/projected/10f55730-6ea0-4989-a006-b0549f5566a7-kube-api-access-rmtv6\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.562742 4838 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.563266 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/10f55730-6ea0-4989-a006-b0549f5566a7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.563579 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/10f55730-6ea0-4989-a006-b0549f5566a7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.563647 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/10f55730-6ea0-4989-a006-b0549f5566a7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.563678 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/10f55730-6ea0-4989-a006-b0549f5566a7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.564936 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/10f55730-6ea0-4989-a006-b0549f5566a7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.565222 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/10f55730-6ea0-4989-a006-b0549f5566a7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.565724 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/10f55730-6ea0-4989-a006-b0549f5566a7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.566271 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" 
(UniqueName: \"kubernetes.io/configmap/10f55730-6ea0-4989-a006-b0549f5566a7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.570484 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/10f55730-6ea0-4989-a006-b0549f5566a7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.572997 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/10f55730-6ea0-4989-a006-b0549f5566a7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.574457 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/10f55730-6ea0-4989-a006-b0549f5566a7-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.576601 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/10f55730-6ea0-4989-a006-b0549f5566a7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.590723 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmtv6\" (UniqueName: \"kubernetes.io/projected/10f55730-6ea0-4989-a006-b0549f5566a7-kube-api-access-rmtv6\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.610919 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:37 crc kubenswrapper[4838]: I0202 11:10:37.770129 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.716800 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.720879 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.723309 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.723599 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-wpwgh" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.727605 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.727869 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.736388 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.737347 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.783274 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b42feb4-a718-4036-be9e-3113b97680c4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5b42feb4-a718-4036-be9e-3113b97680c4\") " pod="openstack/openstack-galera-0" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.783325 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b42feb4-a718-4036-be9e-3113b97680c4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"5b42feb4-a718-4036-be9e-3113b97680c4\") " pod="openstack/openstack-galera-0" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.783364 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5b42feb4-a718-4036-be9e-3113b97680c4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5b42feb4-a718-4036-be9e-3113b97680c4\") " pod="openstack/openstack-galera-0" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.783456 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"5b42feb4-a718-4036-be9e-3113b97680c4\") " pod="openstack/openstack-galera-0" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.783483 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b42feb4-a718-4036-be9e-3113b97680c4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"5b42feb4-a718-4036-be9e-3113b97680c4\") " pod="openstack/openstack-galera-0" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.783530 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5b42feb4-a718-4036-be9e-3113b97680c4-config-data-default\") pod \"openstack-galera-0\" (UID: \"5b42feb4-a718-4036-be9e-3113b97680c4\") " pod="openstack/openstack-galera-0" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.783576 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-b97rk\" (UniqueName: \"kubernetes.io/projected/5b42feb4-a718-4036-be9e-3113b97680c4-kube-api-access-b97rk\") pod \"openstack-galera-0\" (UID: \"5b42feb4-a718-4036-be9e-3113b97680c4\") " pod="openstack/openstack-galera-0" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.783609 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5b42feb4-a718-4036-be9e-3113b97680c4-kolla-config\") pod \"openstack-galera-0\" (UID: \"5b42feb4-a718-4036-be9e-3113b97680c4\") " pod="openstack/openstack-galera-0" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.885283 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"5b42feb4-a718-4036-be9e-3113b97680c4\") " pod="openstack/openstack-galera-0" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.885332 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b42feb4-a718-4036-be9e-3113b97680c4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"5b42feb4-a718-4036-be9e-3113b97680c4\") " pod="openstack/openstack-galera-0" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.885370 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5b42feb4-a718-4036-be9e-3113b97680c4-config-data-default\") pod \"openstack-galera-0\" (UID: \"5b42feb4-a718-4036-be9e-3113b97680c4\") " pod="openstack/openstack-galera-0" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.885405 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b97rk\" (UniqueName: \"kubernetes.io/projected/5b42feb4-a718-4036-be9e-3113b97680c4-kube-api-access-b97rk\") pod \"openstack-galera-0\" (UID: \"5b42feb4-a718-4036-be9e-3113b97680c4\") " pod="openstack/openstack-galera-0" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.885428 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5b42feb4-a718-4036-be9e-3113b97680c4-kolla-config\") pod \"openstack-galera-0\" (UID: \"5b42feb4-a718-4036-be9e-3113b97680c4\") " pod="openstack/openstack-galera-0" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.885446 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b42feb4-a718-4036-be9e-3113b97680c4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5b42feb4-a718-4036-be9e-3113b97680c4\") " pod="openstack/openstack-galera-0" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.885463 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b42feb4-a718-4036-be9e-3113b97680c4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"5b42feb4-a718-4036-be9e-3113b97680c4\") " pod="openstack/openstack-galera-0" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.885489 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5b42feb4-a718-4036-be9e-3113b97680c4-config-data-generated\") pod \"openstack-galera-0\" (UID: 
\"5b42feb4-a718-4036-be9e-3113b97680c4\") " pod="openstack/openstack-galera-0" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.885959 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5b42feb4-a718-4036-be9e-3113b97680c4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5b42feb4-a718-4036-be9e-3113b97680c4\") " pod="openstack/openstack-galera-0" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.886277 4838 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"5b42feb4-a718-4036-be9e-3113b97680c4\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/openstack-galera-0" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.887200 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5b42feb4-a718-4036-be9e-3113b97680c4-kolla-config\") pod \"openstack-galera-0\" (UID: \"5b42feb4-a718-4036-be9e-3113b97680c4\") " pod="openstack/openstack-galera-0" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.887637 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b42feb4-a718-4036-be9e-3113b97680c4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5b42feb4-a718-4036-be9e-3113b97680c4\") " pod="openstack/openstack-galera-0" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.888249 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5b42feb4-a718-4036-be9e-3113b97680c4-config-data-default\") pod \"openstack-galera-0\" (UID: \"5b42feb4-a718-4036-be9e-3113b97680c4\") " pod="openstack/openstack-galera-0" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.899311 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b42feb4-a718-4036-be9e-3113b97680c4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"5b42feb4-a718-4036-be9e-3113b97680c4\") " pod="openstack/openstack-galera-0" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.900032 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b42feb4-a718-4036-be9e-3113b97680c4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"5b42feb4-a718-4036-be9e-3113b97680c4\") " pod="openstack/openstack-galera-0" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.905227 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b97rk\" (UniqueName: \"kubernetes.io/projected/5b42feb4-a718-4036-be9e-3113b97680c4-kube-api-access-b97rk\") pod \"openstack-galera-0\" (UID: \"5b42feb4-a718-4036-be9e-3113b97680c4\") " pod="openstack/openstack-galera-0" Feb 02 11:10:38 crc kubenswrapper[4838]: I0202 11:10:38.915087 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"5b42feb4-a718-4036-be9e-3113b97680c4\") " pod="openstack/openstack-galera-0" Feb 02 11:10:39 crc kubenswrapper[4838]: I0202 11:10:39.049034 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Feb 02 11:10:39 crc kubenswrapper[4838]: W0202 11:10:39.946052 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6ef67aca_5d1e_42eb_a108_ed5db869c6fe.slice/crio-d418ecc3121b2943b1cfb458db2c3973d3f5598a2ca240e347c7c2e2d56290c7 WatchSource:0}: Error finding container d418ecc3121b2943b1cfb458db2c3973d3f5598a2ca240e347c7c2e2d56290c7: Status 404 returned error can't find the container with id d418ecc3121b2943b1cfb458db2c3973d3f5598a2ca240e347c7c2e2d56290c7 Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.181358 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.185044 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.187847 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-d99cx" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.188081 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.188913 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.189809 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.194644 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.209501 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-7d89g" event={"ID":"093bf62e-0c85-4d63-8dd4-4003b7f20122","Type":"ContainerStarted","Data":"73524f146fe6f7958e3cdad95c2b9b0689e52376da965733b2fb01e12f423961"} Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.211811 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-kf66g" event={"ID":"6ef67aca-5d1e-42eb-a108-ed5db869c6fe","Type":"ContainerStarted","Data":"d418ecc3121b2943b1cfb458db2c3973d3f5598a2ca240e347c7c2e2d56290c7"} Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.314398 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2f841e0c-b40f-4dd1-8427-ea07840bcdf6-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"2f841e0c-b40f-4dd1-8427-ea07840bcdf6\") " pod="openstack/openstack-cell1-galera-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.314442 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f841e0c-b40f-4dd1-8427-ea07840bcdf6-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"2f841e0c-b40f-4dd1-8427-ea07840bcdf6\") " pod="openstack/openstack-cell1-galera-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.314464 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mtmm\" (UniqueName: \"kubernetes.io/projected/2f841e0c-b40f-4dd1-8427-ea07840bcdf6-kube-api-access-6mtmm\") pod 
\"openstack-cell1-galera-0\" (UID: \"2f841e0c-b40f-4dd1-8427-ea07840bcdf6\") " pod="openstack/openstack-cell1-galera-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.314538 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: \"2f841e0c-b40f-4dd1-8427-ea07840bcdf6\") " pod="openstack/openstack-cell1-galera-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.314555 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f841e0c-b40f-4dd1-8427-ea07840bcdf6-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"2f841e0c-b40f-4dd1-8427-ea07840bcdf6\") " pod="openstack/openstack-cell1-galera-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.314577 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f841e0c-b40f-4dd1-8427-ea07840bcdf6-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"2f841e0c-b40f-4dd1-8427-ea07840bcdf6\") " pod="openstack/openstack-cell1-galera-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.314637 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2f841e0c-b40f-4dd1-8427-ea07840bcdf6-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"2f841e0c-b40f-4dd1-8427-ea07840bcdf6\") " pod="openstack/openstack-cell1-galera-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.314675 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2f841e0c-b40f-4dd1-8427-ea07840bcdf6-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"2f841e0c-b40f-4dd1-8427-ea07840bcdf6\") " pod="openstack/openstack-cell1-galera-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.415591 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2f841e0c-b40f-4dd1-8427-ea07840bcdf6-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"2f841e0c-b40f-4dd1-8427-ea07840bcdf6\") " pod="openstack/openstack-cell1-galera-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.415648 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f841e0c-b40f-4dd1-8427-ea07840bcdf6-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"2f841e0c-b40f-4dd1-8427-ea07840bcdf6\") " pod="openstack/openstack-cell1-galera-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.415674 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mtmm\" (UniqueName: \"kubernetes.io/projected/2f841e0c-b40f-4dd1-8427-ea07840bcdf6-kube-api-access-6mtmm\") pod \"openstack-cell1-galera-0\" (UID: \"2f841e0c-b40f-4dd1-8427-ea07840bcdf6\") " pod="openstack/openstack-cell1-galera-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.415712 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: 
\"2f841e0c-b40f-4dd1-8427-ea07840bcdf6\") " pod="openstack/openstack-cell1-galera-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.415728 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f841e0c-b40f-4dd1-8427-ea07840bcdf6-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"2f841e0c-b40f-4dd1-8427-ea07840bcdf6\") " pod="openstack/openstack-cell1-galera-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.415746 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f841e0c-b40f-4dd1-8427-ea07840bcdf6-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"2f841e0c-b40f-4dd1-8427-ea07840bcdf6\") " pod="openstack/openstack-cell1-galera-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.415785 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2f841e0c-b40f-4dd1-8427-ea07840bcdf6-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"2f841e0c-b40f-4dd1-8427-ea07840bcdf6\") " pod="openstack/openstack-cell1-galera-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.415813 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2f841e0c-b40f-4dd1-8427-ea07840bcdf6-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"2f841e0c-b40f-4dd1-8427-ea07840bcdf6\") " pod="openstack/openstack-cell1-galera-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.415992 4838 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: \"2f841e0c-b40f-4dd1-8427-ea07840bcdf6\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/openstack-cell1-galera-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.416219 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2f841e0c-b40f-4dd1-8427-ea07840bcdf6-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"2f841e0c-b40f-4dd1-8427-ea07840bcdf6\") " pod="openstack/openstack-cell1-galera-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.416428 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2f841e0c-b40f-4dd1-8427-ea07840bcdf6-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"2f841e0c-b40f-4dd1-8427-ea07840bcdf6\") " pod="openstack/openstack-cell1-galera-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.416897 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2f841e0c-b40f-4dd1-8427-ea07840bcdf6-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"2f841e0c-b40f-4dd1-8427-ea07840bcdf6\") " pod="openstack/openstack-cell1-galera-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.417819 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f841e0c-b40f-4dd1-8427-ea07840bcdf6-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"2f841e0c-b40f-4dd1-8427-ea07840bcdf6\") " pod="openstack/openstack-cell1-galera-0" Feb 
02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.422859 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f841e0c-b40f-4dd1-8427-ea07840bcdf6-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"2f841e0c-b40f-4dd1-8427-ea07840bcdf6\") " pod="openstack/openstack-cell1-galera-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.435403 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mtmm\" (UniqueName: \"kubernetes.io/projected/2f841e0c-b40f-4dd1-8427-ea07840bcdf6-kube-api-access-6mtmm\") pod \"openstack-cell1-galera-0\" (UID: \"2f841e0c-b40f-4dd1-8427-ea07840bcdf6\") " pod="openstack/openstack-cell1-galera-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.437434 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f841e0c-b40f-4dd1-8427-ea07840bcdf6-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"2f841e0c-b40f-4dd1-8427-ea07840bcdf6\") " pod="openstack/openstack-cell1-galera-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.443207 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: \"2f841e0c-b40f-4dd1-8427-ea07840bcdf6\") " pod="openstack/openstack-cell1-galera-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.501441 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.559870 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.562180 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.565802 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.565886 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-js8rk" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.565804 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.589083 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.622337 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/605eae6b-dbaf-4781-97bb-2ef09397141d-memcached-tls-certs\") pod \"memcached-0\" (UID: \"605eae6b-dbaf-4781-97bb-2ef09397141d\") " pod="openstack/memcached-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.622423 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/605eae6b-dbaf-4781-97bb-2ef09397141d-config-data\") pod \"memcached-0\" (UID: \"605eae6b-dbaf-4781-97bb-2ef09397141d\") " pod="openstack/memcached-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.622473 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/605eae6b-dbaf-4781-97bb-2ef09397141d-combined-ca-bundle\") pod \"memcached-0\" (UID: \"605eae6b-dbaf-4781-97bb-2ef09397141d\") " pod="openstack/memcached-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.622502 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zs27p\" (UniqueName: \"kubernetes.io/projected/605eae6b-dbaf-4781-97bb-2ef09397141d-kube-api-access-zs27p\") pod \"memcached-0\" (UID: \"605eae6b-dbaf-4781-97bb-2ef09397141d\") " pod="openstack/memcached-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.622528 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/605eae6b-dbaf-4781-97bb-2ef09397141d-kolla-config\") pod \"memcached-0\" (UID: \"605eae6b-dbaf-4781-97bb-2ef09397141d\") " pod="openstack/memcached-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.724111 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/605eae6b-dbaf-4781-97bb-2ef09397141d-memcached-tls-certs\") pod \"memcached-0\" (UID: \"605eae6b-dbaf-4781-97bb-2ef09397141d\") " pod="openstack/memcached-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.724178 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/605eae6b-dbaf-4781-97bb-2ef09397141d-config-data\") pod \"memcached-0\" (UID: \"605eae6b-dbaf-4781-97bb-2ef09397141d\") " pod="openstack/memcached-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.724226 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/605eae6b-dbaf-4781-97bb-2ef09397141d-combined-ca-bundle\") pod \"memcached-0\" (UID: \"605eae6b-dbaf-4781-97bb-2ef09397141d\") " pod="openstack/memcached-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.724258 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zs27p\" (UniqueName: \"kubernetes.io/projected/605eae6b-dbaf-4781-97bb-2ef09397141d-kube-api-access-zs27p\") pod \"memcached-0\" (UID: \"605eae6b-dbaf-4781-97bb-2ef09397141d\") " pod="openstack/memcached-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.724282 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/605eae6b-dbaf-4781-97bb-2ef09397141d-kolla-config\") pod \"memcached-0\" (UID: \"605eae6b-dbaf-4781-97bb-2ef09397141d\") " pod="openstack/memcached-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.724931 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/605eae6b-dbaf-4781-97bb-2ef09397141d-config-data\") pod \"memcached-0\" (UID: \"605eae6b-dbaf-4781-97bb-2ef09397141d\") " pod="openstack/memcached-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.725234 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/605eae6b-dbaf-4781-97bb-2ef09397141d-kolla-config\") pod \"memcached-0\" (UID: \"605eae6b-dbaf-4781-97bb-2ef09397141d\") " pod="openstack/memcached-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.727761 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/605eae6b-dbaf-4781-97bb-2ef09397141d-combined-ca-bundle\") pod \"memcached-0\" (UID: \"605eae6b-dbaf-4781-97bb-2ef09397141d\") " pod="openstack/memcached-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.729139 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/605eae6b-dbaf-4781-97bb-2ef09397141d-memcached-tls-certs\") pod \"memcached-0\" (UID: \"605eae6b-dbaf-4781-97bb-2ef09397141d\") " pod="openstack/memcached-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.739529 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zs27p\" (UniqueName: \"kubernetes.io/projected/605eae6b-dbaf-4781-97bb-2ef09397141d-kube-api-access-zs27p\") pod \"memcached-0\" (UID: \"605eae6b-dbaf-4781-97bb-2ef09397141d\") " pod="openstack/memcached-0" Feb 02 11:10:40 crc kubenswrapper[4838]: I0202 11:10:40.878458 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Feb 02 11:10:42 crc kubenswrapper[4838]: I0202 11:10:42.564304 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 11:10:42 crc kubenswrapper[4838]: I0202 11:10:42.565358 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 02 11:10:42 crc kubenswrapper[4838]: I0202 11:10:42.573403 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-kc74s" Feb 02 11:10:42 crc kubenswrapper[4838]: I0202 11:10:42.575973 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 11:10:42 crc kubenswrapper[4838]: I0202 11:10:42.652039 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5w7jg\" (UniqueName: \"kubernetes.io/projected/039ec177-4229-482e-aeec-ec3db4349951-kube-api-access-5w7jg\") pod \"kube-state-metrics-0\" (UID: \"039ec177-4229-482e-aeec-ec3db4349951\") " pod="openstack/kube-state-metrics-0" Feb 02 11:10:42 crc kubenswrapper[4838]: I0202 11:10:42.753881 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5w7jg\" (UniqueName: \"kubernetes.io/projected/039ec177-4229-482e-aeec-ec3db4349951-kube-api-access-5w7jg\") pod \"kube-state-metrics-0\" (UID: \"039ec177-4229-482e-aeec-ec3db4349951\") " pod="openstack/kube-state-metrics-0" Feb 02 11:10:42 crc kubenswrapper[4838]: I0202 11:10:42.771905 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5w7jg\" (UniqueName: \"kubernetes.io/projected/039ec177-4229-482e-aeec-ec3db4349951-kube-api-access-5w7jg\") pod \"kube-state-metrics-0\" (UID: \"039ec177-4229-482e-aeec-ec3db4349951\") " pod="openstack/kube-state-metrics-0" Feb 02 11:10:42 crc kubenswrapper[4838]: I0202 11:10:42.885238 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.019042 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-78llm"] Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.023107 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-78llm" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.030642 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.031539 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-8hfww" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.031563 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.048114 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-gmk9d"] Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.054935 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-gmk9d" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.062677 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-78llm"] Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.118634 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/89d377c3-4929-47c4-abc7-53bb5e058025-scripts\") pod \"ovn-controller-78llm\" (UID: \"89d377c3-4929-47c4-abc7-53bb5e058025\") " pod="openstack/ovn-controller-78llm" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.118709 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/6201c1a8-a058-4029-ac96-17f4500b9fc0-var-lib\") pod \"ovn-controller-ovs-gmk9d\" (UID: \"6201c1a8-a058-4029-ac96-17f4500b9fc0\") " pod="openstack/ovn-controller-ovs-gmk9d" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.118733 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/89d377c3-4929-47c4-abc7-53bb5e058025-var-run\") pod \"ovn-controller-78llm\" (UID: \"89d377c3-4929-47c4-abc7-53bb5e058025\") " pod="openstack/ovn-controller-78llm" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.118754 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89d377c3-4929-47c4-abc7-53bb5e058025-combined-ca-bundle\") pod \"ovn-controller-78llm\" (UID: \"89d377c3-4929-47c4-abc7-53bb5e058025\") " pod="openstack/ovn-controller-78llm" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.118784 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/6201c1a8-a058-4029-ac96-17f4500b9fc0-etc-ovs\") pod \"ovn-controller-ovs-gmk9d\" (UID: \"6201c1a8-a058-4029-ac96-17f4500b9fc0\") " pod="openstack/ovn-controller-ovs-gmk9d" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.118804 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkxld\" (UniqueName: \"kubernetes.io/projected/89d377c3-4929-47c4-abc7-53bb5e058025-kube-api-access-qkxld\") pod \"ovn-controller-78llm\" (UID: \"89d377c3-4929-47c4-abc7-53bb5e058025\") " pod="openstack/ovn-controller-78llm" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.118823 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/89d377c3-4929-47c4-abc7-53bb5e058025-var-run-ovn\") pod \"ovn-controller-78llm\" (UID: \"89d377c3-4929-47c4-abc7-53bb5e058025\") " pod="openstack/ovn-controller-78llm" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.118845 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/6201c1a8-a058-4029-ac96-17f4500b9fc0-var-log\") pod \"ovn-controller-ovs-gmk9d\" (UID: \"6201c1a8-a058-4029-ac96-17f4500b9fc0\") " pod="openstack/ovn-controller-ovs-gmk9d" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.118863 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/89d377c3-4929-47c4-abc7-53bb5e058025-ovn-controller-tls-certs\") pod \"ovn-controller-78llm\" (UID: \"89d377c3-4929-47c4-abc7-53bb5e058025\") " pod="openstack/ovn-controller-78llm" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.118904 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mnp2\" (UniqueName: \"kubernetes.io/projected/6201c1a8-a058-4029-ac96-17f4500b9fc0-kube-api-access-7mnp2\") pod \"ovn-controller-ovs-gmk9d\" (UID: \"6201c1a8-a058-4029-ac96-17f4500b9fc0\") " pod="openstack/ovn-controller-ovs-gmk9d" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.118922 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6201c1a8-a058-4029-ac96-17f4500b9fc0-var-run\") pod \"ovn-controller-ovs-gmk9d\" (UID: \"6201c1a8-a058-4029-ac96-17f4500b9fc0\") " pod="openstack/ovn-controller-ovs-gmk9d" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.118938 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6201c1a8-a058-4029-ac96-17f4500b9fc0-scripts\") pod \"ovn-controller-ovs-gmk9d\" (UID: \"6201c1a8-a058-4029-ac96-17f4500b9fc0\") " pod="openstack/ovn-controller-ovs-gmk9d" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.118970 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/89d377c3-4929-47c4-abc7-53bb5e058025-var-log-ovn\") pod \"ovn-controller-78llm\" (UID: \"89d377c3-4929-47c4-abc7-53bb5e058025\") " pod="openstack/ovn-controller-78llm" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.121811 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-gmk9d"] Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.220970 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/89d377c3-4929-47c4-abc7-53bb5e058025-scripts\") pod \"ovn-controller-78llm\" (UID: \"89d377c3-4929-47c4-abc7-53bb5e058025\") " pod="openstack/ovn-controller-78llm" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.221037 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/6201c1a8-a058-4029-ac96-17f4500b9fc0-var-lib\") pod \"ovn-controller-ovs-gmk9d\" (UID: \"6201c1a8-a058-4029-ac96-17f4500b9fc0\") " pod="openstack/ovn-controller-ovs-gmk9d" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.221074 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/89d377c3-4929-47c4-abc7-53bb5e058025-var-run\") pod \"ovn-controller-78llm\" (UID: \"89d377c3-4929-47c4-abc7-53bb5e058025\") " pod="openstack/ovn-controller-78llm" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.221104 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89d377c3-4929-47c4-abc7-53bb5e058025-combined-ca-bundle\") pod \"ovn-controller-78llm\" (UID: \"89d377c3-4929-47c4-abc7-53bb5e058025\") " pod="openstack/ovn-controller-78llm" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.221139 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" 
(UniqueName: \"kubernetes.io/host-path/6201c1a8-a058-4029-ac96-17f4500b9fc0-etc-ovs\") pod \"ovn-controller-ovs-gmk9d\" (UID: \"6201c1a8-a058-4029-ac96-17f4500b9fc0\") " pod="openstack/ovn-controller-ovs-gmk9d" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.221169 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkxld\" (UniqueName: \"kubernetes.io/projected/89d377c3-4929-47c4-abc7-53bb5e058025-kube-api-access-qkxld\") pod \"ovn-controller-78llm\" (UID: \"89d377c3-4929-47c4-abc7-53bb5e058025\") " pod="openstack/ovn-controller-78llm" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.221190 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/89d377c3-4929-47c4-abc7-53bb5e058025-var-run-ovn\") pod \"ovn-controller-78llm\" (UID: \"89d377c3-4929-47c4-abc7-53bb5e058025\") " pod="openstack/ovn-controller-78llm" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.221216 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/89d377c3-4929-47c4-abc7-53bb5e058025-ovn-controller-tls-certs\") pod \"ovn-controller-78llm\" (UID: \"89d377c3-4929-47c4-abc7-53bb5e058025\") " pod="openstack/ovn-controller-78llm" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.221239 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/6201c1a8-a058-4029-ac96-17f4500b9fc0-var-log\") pod \"ovn-controller-ovs-gmk9d\" (UID: \"6201c1a8-a058-4029-ac96-17f4500b9fc0\") " pod="openstack/ovn-controller-ovs-gmk9d" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.221285 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mnp2\" (UniqueName: \"kubernetes.io/projected/6201c1a8-a058-4029-ac96-17f4500b9fc0-kube-api-access-7mnp2\") pod \"ovn-controller-ovs-gmk9d\" (UID: \"6201c1a8-a058-4029-ac96-17f4500b9fc0\") " pod="openstack/ovn-controller-ovs-gmk9d" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.221316 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6201c1a8-a058-4029-ac96-17f4500b9fc0-var-run\") pod \"ovn-controller-ovs-gmk9d\" (UID: \"6201c1a8-a058-4029-ac96-17f4500b9fc0\") " pod="openstack/ovn-controller-ovs-gmk9d" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.221346 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6201c1a8-a058-4029-ac96-17f4500b9fc0-scripts\") pod \"ovn-controller-ovs-gmk9d\" (UID: \"6201c1a8-a058-4029-ac96-17f4500b9fc0\") " pod="openstack/ovn-controller-ovs-gmk9d" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.221387 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/89d377c3-4929-47c4-abc7-53bb5e058025-var-log-ovn\") pod \"ovn-controller-78llm\" (UID: \"89d377c3-4929-47c4-abc7-53bb5e058025\") " pod="openstack/ovn-controller-78llm" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.222590 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/89d377c3-4929-47c4-abc7-53bb5e058025-var-run-ovn\") pod \"ovn-controller-78llm\" (UID: \"89d377c3-4929-47c4-abc7-53bb5e058025\") " 
pod="openstack/ovn-controller-78llm" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.222814 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/6201c1a8-a058-4029-ac96-17f4500b9fc0-var-lib\") pod \"ovn-controller-ovs-gmk9d\" (UID: \"6201c1a8-a058-4029-ac96-17f4500b9fc0\") " pod="openstack/ovn-controller-ovs-gmk9d" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.222921 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/89d377c3-4929-47c4-abc7-53bb5e058025-var-run\") pod \"ovn-controller-78llm\" (UID: \"89d377c3-4929-47c4-abc7-53bb5e058025\") " pod="openstack/ovn-controller-78llm" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.222985 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/89d377c3-4929-47c4-abc7-53bb5e058025-scripts\") pod \"ovn-controller-78llm\" (UID: \"89d377c3-4929-47c4-abc7-53bb5e058025\") " pod="openstack/ovn-controller-78llm" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.223109 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/6201c1a8-a058-4029-ac96-17f4500b9fc0-etc-ovs\") pod \"ovn-controller-ovs-gmk9d\" (UID: \"6201c1a8-a058-4029-ac96-17f4500b9fc0\") " pod="openstack/ovn-controller-ovs-gmk9d" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.223893 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/6201c1a8-a058-4029-ac96-17f4500b9fc0-var-log\") pod \"ovn-controller-ovs-gmk9d\" (UID: \"6201c1a8-a058-4029-ac96-17f4500b9fc0\") " pod="openstack/ovn-controller-ovs-gmk9d" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.223956 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6201c1a8-a058-4029-ac96-17f4500b9fc0-var-run\") pod \"ovn-controller-ovs-gmk9d\" (UID: \"6201c1a8-a058-4029-ac96-17f4500b9fc0\") " pod="openstack/ovn-controller-ovs-gmk9d" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.224962 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/89d377c3-4929-47c4-abc7-53bb5e058025-var-log-ovn\") pod \"ovn-controller-78llm\" (UID: \"89d377c3-4929-47c4-abc7-53bb5e058025\") " pod="openstack/ovn-controller-78llm" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.226065 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6201c1a8-a058-4029-ac96-17f4500b9fc0-scripts\") pod \"ovn-controller-ovs-gmk9d\" (UID: \"6201c1a8-a058-4029-ac96-17f4500b9fc0\") " pod="openstack/ovn-controller-ovs-gmk9d" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.228221 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89d377c3-4929-47c4-abc7-53bb5e058025-combined-ca-bundle\") pod \"ovn-controller-78llm\" (UID: \"89d377c3-4929-47c4-abc7-53bb5e058025\") " pod="openstack/ovn-controller-78llm" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.239740 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mnp2\" (UniqueName: \"kubernetes.io/projected/6201c1a8-a058-4029-ac96-17f4500b9fc0-kube-api-access-7mnp2\") pod 
\"ovn-controller-ovs-gmk9d\" (UID: \"6201c1a8-a058-4029-ac96-17f4500b9fc0\") " pod="openstack/ovn-controller-ovs-gmk9d" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.241643 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/89d377c3-4929-47c4-abc7-53bb5e058025-ovn-controller-tls-certs\") pod \"ovn-controller-78llm\" (UID: \"89d377c3-4929-47c4-abc7-53bb5e058025\") " pod="openstack/ovn-controller-78llm" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.243523 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkxld\" (UniqueName: \"kubernetes.io/projected/89d377c3-4929-47c4-abc7-53bb5e058025-kube-api-access-qkxld\") pod \"ovn-controller-78llm\" (UID: \"89d377c3-4929-47c4-abc7-53bb5e058025\") " pod="openstack/ovn-controller-78llm" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.402313 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-78llm" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.414930 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-gmk9d" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.563604 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.565646 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.567847 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.568459 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.568528 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-8756w" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.568806 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.570472 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.587766 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.630506 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bbb8b7a6-28d0-40fa-bdcb-fe95357c8018-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018\") " pod="openstack/ovsdbserver-nb-0" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.630717 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bbb8b7a6-28d0-40fa-bdcb-fe95357c8018-config\") pod \"ovsdbserver-nb-0\" (UID: \"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018\") " pod="openstack/ovsdbserver-nb-0" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.630767 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l472n\" (UniqueName: 
\"kubernetes.io/projected/bbb8b7a6-28d0-40fa-bdcb-fe95357c8018-kube-api-access-l472n\") pod \"ovsdbserver-nb-0\" (UID: \"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018\") " pod="openstack/ovsdbserver-nb-0" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.631170 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bbb8b7a6-28d0-40fa-bdcb-fe95357c8018-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018\") " pod="openstack/ovsdbserver-nb-0" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.631408 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018\") " pod="openstack/ovsdbserver-nb-0" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.631756 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bbb8b7a6-28d0-40fa-bdcb-fe95357c8018-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018\") " pod="openstack/ovsdbserver-nb-0" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.631822 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bbb8b7a6-28d0-40fa-bdcb-fe95357c8018-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018\") " pod="openstack/ovsdbserver-nb-0" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.631858 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbb8b7a6-28d0-40fa-bdcb-fe95357c8018-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018\") " pod="openstack/ovsdbserver-nb-0" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.733127 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bbb8b7a6-28d0-40fa-bdcb-fe95357c8018-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018\") " pod="openstack/ovsdbserver-nb-0" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.733208 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018\") " pod="openstack/ovsdbserver-nb-0" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.733307 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bbb8b7a6-28d0-40fa-bdcb-fe95357c8018-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018\") " pod="openstack/ovsdbserver-nb-0" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.733371 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbb8b7a6-28d0-40fa-bdcb-fe95357c8018-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018\") " pod="openstack/ovsdbserver-nb-0" Feb 
02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.733393 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bbb8b7a6-28d0-40fa-bdcb-fe95357c8018-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018\") " pod="openstack/ovsdbserver-nb-0" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.733446 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bbb8b7a6-28d0-40fa-bdcb-fe95357c8018-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018\") " pod="openstack/ovsdbserver-nb-0" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.733473 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bbb8b7a6-28d0-40fa-bdcb-fe95357c8018-config\") pod \"ovsdbserver-nb-0\" (UID: \"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018\") " pod="openstack/ovsdbserver-nb-0" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.733508 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l472n\" (UniqueName: \"kubernetes.io/projected/bbb8b7a6-28d0-40fa-bdcb-fe95357c8018-kube-api-access-l472n\") pod \"ovsdbserver-nb-0\" (UID: \"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018\") " pod="openstack/ovsdbserver-nb-0" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.733710 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bbb8b7a6-28d0-40fa-bdcb-fe95357c8018-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018\") " pod="openstack/ovsdbserver-nb-0" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.734759 4838 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/ovsdbserver-nb-0" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.735072 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bbb8b7a6-28d0-40fa-bdcb-fe95357c8018-config\") pod \"ovsdbserver-nb-0\" (UID: \"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018\") " pod="openstack/ovsdbserver-nb-0" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.735291 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bbb8b7a6-28d0-40fa-bdcb-fe95357c8018-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018\") " pod="openstack/ovsdbserver-nb-0" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.748413 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bbb8b7a6-28d0-40fa-bdcb-fe95357c8018-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018\") " pod="openstack/ovsdbserver-nb-0" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.749781 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbb8b7a6-28d0-40fa-bdcb-fe95357c8018-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: 
\"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018\") " pod="openstack/ovsdbserver-nb-0" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.750222 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bbb8b7a6-28d0-40fa-bdcb-fe95357c8018-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018\") " pod="openstack/ovsdbserver-nb-0" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.751634 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l472n\" (UniqueName: \"kubernetes.io/projected/bbb8b7a6-28d0-40fa-bdcb-fe95357c8018-kube-api-access-l472n\") pod \"ovsdbserver-nb-0\" (UID: \"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018\") " pod="openstack/ovsdbserver-nb-0" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.753687 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018\") " pod="openstack/ovsdbserver-nb-0" Feb 02 11:10:46 crc kubenswrapper[4838]: I0202 11:10:46.898290 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Feb 02 11:10:49 crc kubenswrapper[4838]: I0202 11:10:49.821609 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 02 11:10:49 crc kubenswrapper[4838]: I0202 11:10:49.826591 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:49 crc kubenswrapper[4838]: I0202 11:10:49.837519 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-d6l65" Feb 02 11:10:49 crc kubenswrapper[4838]: I0202 11:10:49.838726 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Feb 02 11:10:49 crc kubenswrapper[4838]: I0202 11:10:49.839108 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Feb 02 11:10:49 crc kubenswrapper[4838]: I0202 11:10:49.839845 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Feb 02 11:10:49 crc kubenswrapper[4838]: I0202 11:10:49.876405 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 02 11:10:49 crc kubenswrapper[4838]: I0202 11:10:49.892278 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vn9rb\" (UniqueName: \"kubernetes.io/projected/a8af70b4-152f-4edb-a4c5-afc8baed3685-kube-api-access-vn9rb\") pod \"ovsdbserver-sb-0\" (UID: \"a8af70b4-152f-4edb-a4c5-afc8baed3685\") " pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:49 crc kubenswrapper[4838]: I0202 11:10:49.892326 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8af70b4-152f-4edb-a4c5-afc8baed3685-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"a8af70b4-152f-4edb-a4c5-afc8baed3685\") " pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:49 crc kubenswrapper[4838]: I0202 11:10:49.892410 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/a8af70b4-152f-4edb-a4c5-afc8baed3685-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"a8af70b4-152f-4edb-a4c5-afc8baed3685\") " pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:49 crc kubenswrapper[4838]: I0202 11:10:49.892456 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a8af70b4-152f-4edb-a4c5-afc8baed3685-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"a8af70b4-152f-4edb-a4c5-afc8baed3685\") " pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:49 crc kubenswrapper[4838]: I0202 11:10:49.892507 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"a8af70b4-152f-4edb-a4c5-afc8baed3685\") " pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:49 crc kubenswrapper[4838]: I0202 11:10:49.892532 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8af70b4-152f-4edb-a4c5-afc8baed3685-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"a8af70b4-152f-4edb-a4c5-afc8baed3685\") " pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:49 crc kubenswrapper[4838]: I0202 11:10:49.892552 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8af70b4-152f-4edb-a4c5-afc8baed3685-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"a8af70b4-152f-4edb-a4c5-afc8baed3685\") " pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:49 crc kubenswrapper[4838]: I0202 11:10:49.892578 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a8af70b4-152f-4edb-a4c5-afc8baed3685-config\") pod \"ovsdbserver-sb-0\" (UID: \"a8af70b4-152f-4edb-a4c5-afc8baed3685\") " pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:49 crc kubenswrapper[4838]: E0202 11:10:49.917572 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Feb 02 11:10:49 crc kubenswrapper[4838]: E0202 11:10:49.917786 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9rhhp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-92wp6_openstack(82d26ae8-23be-4861-9384-59cb3e20640d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 11:10:49 crc kubenswrapper[4838]: E0202 11:10:49.919692 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-92wp6" podUID="82d26ae8-23be-4861-9384-59cb3e20640d" Feb 02 11:10:49 crc kubenswrapper[4838]: E0202 11:10:49.925944 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Feb 02 11:10:49 crc kubenswrapper[4838]: E0202 11:10:49.926130 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nr7f7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-5lnnj_openstack(b9ae6407-ad21-4b11-9e2c-ebd7441c5e96): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 11:10:49 crc kubenswrapper[4838]: E0202 11:10:49.927255 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-5lnnj" podUID="b9ae6407-ad21-4b11-9e2c-ebd7441c5e96" Feb 02 11:10:49 crc kubenswrapper[4838]: I0202 11:10:49.995782 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"a8af70b4-152f-4edb-a4c5-afc8baed3685\") " pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:49 crc kubenswrapper[4838]: I0202 11:10:49.995858 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8af70b4-152f-4edb-a4c5-afc8baed3685-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"a8af70b4-152f-4edb-a4c5-afc8baed3685\") " pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:49 crc kubenswrapper[4838]: I0202 11:10:49.995890 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8af70b4-152f-4edb-a4c5-afc8baed3685-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"a8af70b4-152f-4edb-a4c5-afc8baed3685\") " pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:49 crc kubenswrapper[4838]: I0202 11:10:49.995950 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
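
The two ErrImagePull failures above follow the kubelet's usual reporting chain: the CRI error surfaces at log.go:32 as "PullImage from image service failed", kuberuntime_manager.go:1274 then dumps the full Container spec under "Unhandled Error", and pod_workers.go:1301 logs "Error syncing pod, skipping" before the pod enters image-pull backoff; "context canceled" means the copy was aborted in flight rather than refused by the registry. Because every failure drags a multi-kilobyte spec dump along with it, a short summarizer helps condense repeated failures per image. This is an illustrative sketch with the message format assumed from the lines above.

import re, sys
from collections import Counter

# Matches the CRI-level failure lines above, e.g.
#   "PullImage from image service failed" err="rpc error: ..." image="quay.io/..."
PULL = re.compile(r'"PullImage from image service failed" err="([^"]*)" image="([^"]+)"')

def main(path):
    counts, first_err = Counter(), {}
    for line in open(path, errors="replace"):
        m = PULL.search(line)
        if m:
            err, image = m.groups()
            counts[image] += 1
            first_err.setdefault(image, err)
    for image, n in counts.most_common():
        print(f"{n:3d}x {image}")
        print(f"     first error: {first_err[image]}")

if __name__ == "__main__":
    main(sys.argv[1])
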
\"kubernetes.io/configmap/a8af70b4-152f-4edb-a4c5-afc8baed3685-config\") pod \"ovsdbserver-sb-0\" (UID: \"a8af70b4-152f-4edb-a4c5-afc8baed3685\") " pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:49 crc kubenswrapper[4838]: I0202 11:10:49.996015 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vn9rb\" (UniqueName: \"kubernetes.io/projected/a8af70b4-152f-4edb-a4c5-afc8baed3685-kube-api-access-vn9rb\") pod \"ovsdbserver-sb-0\" (UID: \"a8af70b4-152f-4edb-a4c5-afc8baed3685\") " pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:49 crc kubenswrapper[4838]: I0202 11:10:49.996041 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8af70b4-152f-4edb-a4c5-afc8baed3685-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"a8af70b4-152f-4edb-a4c5-afc8baed3685\") " pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:49 crc kubenswrapper[4838]: I0202 11:10:49.996117 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a8af70b4-152f-4edb-a4c5-afc8baed3685-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"a8af70b4-152f-4edb-a4c5-afc8baed3685\") " pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:49 crc kubenswrapper[4838]: I0202 11:10:49.996187 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a8af70b4-152f-4edb-a4c5-afc8baed3685-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"a8af70b4-152f-4edb-a4c5-afc8baed3685\") " pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:49 crc kubenswrapper[4838]: I0202 11:10:49.998303 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a8af70b4-152f-4edb-a4c5-afc8baed3685-config\") pod \"ovsdbserver-sb-0\" (UID: \"a8af70b4-152f-4edb-a4c5-afc8baed3685\") " pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:49 crc kubenswrapper[4838]: I0202 11:10:49.998561 4838 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"a8af70b4-152f-4edb-a4c5-afc8baed3685\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:50 crc kubenswrapper[4838]: I0202 11:10:50.000994 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a8af70b4-152f-4edb-a4c5-afc8baed3685-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"a8af70b4-152f-4edb-a4c5-afc8baed3685\") " pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:50 crc kubenswrapper[4838]: I0202 11:10:50.004874 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a8af70b4-152f-4edb-a4c5-afc8baed3685-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"a8af70b4-152f-4edb-a4c5-afc8baed3685\") " pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:50 crc kubenswrapper[4838]: I0202 11:10:50.011670 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8af70b4-152f-4edb-a4c5-afc8baed3685-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"a8af70b4-152f-4edb-a4c5-afc8baed3685\") " pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:50 crc kubenswrapper[4838]: I0202 11:10:50.027729 4838 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8af70b4-152f-4edb-a4c5-afc8baed3685-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"a8af70b4-152f-4edb-a4c5-afc8baed3685\") " pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:50 crc kubenswrapper[4838]: I0202 11:10:50.031829 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8af70b4-152f-4edb-a4c5-afc8baed3685-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"a8af70b4-152f-4edb-a4c5-afc8baed3685\") " pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:50 crc kubenswrapper[4838]: I0202 11:10:50.058962 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"a8af70b4-152f-4edb-a4c5-afc8baed3685\") " pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:50 crc kubenswrapper[4838]: I0202 11:10:50.068487 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vn9rb\" (UniqueName: \"kubernetes.io/projected/a8af70b4-152f-4edb-a4c5-afc8baed3685-kube-api-access-vn9rb\") pod \"ovsdbserver-sb-0\" (UID: \"a8af70b4-152f-4edb-a4c5-afc8baed3685\") " pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:50 crc kubenswrapper[4838]: I0202 11:10:50.247049 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Feb 02 11:10:50 crc kubenswrapper[4838]: I0202 11:10:50.362482 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Feb 02 11:10:50 crc kubenswrapper[4838]: I0202 11:10:50.451477 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 11:10:50 crc kubenswrapper[4838]: I0202 11:10:50.459496 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 11:10:50 crc kubenswrapper[4838]: W0202 11:10:50.466305 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod698d5f5d_683c_4130_8d4f_d1d59b5d32e4.slice/crio-20eef4683ab1fdb6d7b507d1690c2129630d8517a6e47c040711b0e7bf93e35a WatchSource:0}: Error finding container 20eef4683ab1fdb6d7b507d1690c2129630d8517a6e47c040711b0e7bf93e35a: Status 404 returned error can't find the container with id 20eef4683ab1fdb6d7b507d1690c2129630d8517a6e47c040711b0e7bf93e35a Feb 02 11:10:50 crc kubenswrapper[4838]: I0202 11:10:50.797743 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-5lnnj" Feb 02 11:10:50 crc kubenswrapper[4838]: I0202 11:10:50.815590 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-92wp6" Feb 02 11:10:50 crc kubenswrapper[4838]: I0202 11:10:50.825629 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 02 11:10:50 crc kubenswrapper[4838]: I0202 11:10:50.845898 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 11:10:50 crc kubenswrapper[4838]: W0202 11:10:50.865566 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod89d377c3_4929_47c4_abc7_53bb5e058025.slice/crio-6878750c9e19a6b2e60c488b07bf5d4585106b4735c1213c8ccff1dcec8d2f84 WatchSource:0}: Error finding container 6878750c9e19a6b2e60c488b07bf5d4585106b4735c1213c8ccff1dcec8d2f84: Status 404 returned error can't find the container with id 6878750c9e19a6b2e60c488b07bf5d4585106b4735c1213c8ccff1dcec8d2f84 Feb 02 11:10:50 crc kubenswrapper[4838]: I0202 11:10:50.867453 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-78llm"] Feb 02 11:10:50 crc kubenswrapper[4838]: I0202 11:10:50.873642 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Feb 02 11:10:50 crc kubenswrapper[4838]: W0202 11:10:50.878882 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod605eae6b_dbaf_4781_97bb_2ef09397141d.slice/crio-f6a122c2f1700904cc891a79f61a7f3644c051dc958dfd3212a9cacfb7233562 WatchSource:0}: Error finding container f6a122c2f1700904cc891a79f61a7f3644c051dc958dfd3212a9cacfb7233562: Status 404 returned error can't find the container with id f6a122c2f1700904cc891a79f61a7f3644c051dc958dfd3212a9cacfb7233562 Feb 02 11:10:50 crc kubenswrapper[4838]: I0202 11:10:50.914440 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82d26ae8-23be-4861-9384-59cb3e20640d-config\") pod \"82d26ae8-23be-4861-9384-59cb3e20640d\" (UID: \"82d26ae8-23be-4861-9384-59cb3e20640d\") " Feb 02 11:10:50 crc kubenswrapper[4838]: I0202 11:10:50.914486 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rhhp\" (UniqueName: \"kubernetes.io/projected/82d26ae8-23be-4861-9384-59cb3e20640d-kube-api-access-9rhhp\") pod \"82d26ae8-23be-4861-9384-59cb3e20640d\" (UID: \"82d26ae8-23be-4861-9384-59cb3e20640d\") " Feb 02 11:10:50 crc kubenswrapper[4838]: I0202 11:10:50.914519 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nr7f7\" (UniqueName: \"kubernetes.io/projected/b9ae6407-ad21-4b11-9e2c-ebd7441c5e96-kube-api-access-nr7f7\") pod \"b9ae6407-ad21-4b11-9e2c-ebd7441c5e96\" (UID: \"b9ae6407-ad21-4b11-9e2c-ebd7441c5e96\") " Feb 02 11:10:50 crc kubenswrapper[4838]: I0202 11:10:50.914574 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b9ae6407-ad21-4b11-9e2c-ebd7441c5e96-dns-svc\") pod \"b9ae6407-ad21-4b11-9e2c-ebd7441c5e96\" (UID: \"b9ae6407-ad21-4b11-9e2c-ebd7441c5e96\") " Feb 02 11:10:50 crc kubenswrapper[4838]: I0202 11:10:50.914605 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9ae6407-ad21-4b11-9e2c-ebd7441c5e96-config\") pod \"b9ae6407-ad21-4b11-9e2c-ebd7441c5e96\" (UID: \"b9ae6407-ad21-4b11-9e2c-ebd7441c5e96\") " Feb 02 11:10:50 crc 
kubenswrapper[4838]: I0202 11:10:50.915608 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82d26ae8-23be-4861-9384-59cb3e20640d-config" (OuterVolumeSpecName: "config") pod "82d26ae8-23be-4861-9384-59cb3e20640d" (UID: "82d26ae8-23be-4861-9384-59cb3e20640d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:10:50 crc kubenswrapper[4838]: I0202 11:10:50.917944 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9ae6407-ad21-4b11-9e2c-ebd7441c5e96-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b9ae6407-ad21-4b11-9e2c-ebd7441c5e96" (UID: "b9ae6407-ad21-4b11-9e2c-ebd7441c5e96"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:10:50 crc kubenswrapper[4838]: I0202 11:10:50.918155 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9ae6407-ad21-4b11-9e2c-ebd7441c5e96-config" (OuterVolumeSpecName: "config") pod "b9ae6407-ad21-4b11-9e2c-ebd7441c5e96" (UID: "b9ae6407-ad21-4b11-9e2c-ebd7441c5e96"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:10:50 crc kubenswrapper[4838]: I0202 11:10:50.923063 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9ae6407-ad21-4b11-9e2c-ebd7441c5e96-kube-api-access-nr7f7" (OuterVolumeSpecName: "kube-api-access-nr7f7") pod "b9ae6407-ad21-4b11-9e2c-ebd7441c5e96" (UID: "b9ae6407-ad21-4b11-9e2c-ebd7441c5e96"). InnerVolumeSpecName "kube-api-access-nr7f7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:10:50 crc kubenswrapper[4838]: I0202 11:10:50.923376 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82d26ae8-23be-4861-9384-59cb3e20640d-kube-api-access-9rhhp" (OuterVolumeSpecName: "kube-api-access-9rhhp") pod "82d26ae8-23be-4861-9384-59cb3e20640d" (UID: "82d26ae8-23be-4861-9384-59cb3e20640d"). InnerVolumeSpecName "kube-api-access-9rhhp". 
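
Teardown of the two superseded dnsmasq pods runs the mount machinery in reverse: operationExecutor.UnmountVolume started (reconciler_common.go:159), UnmountVolume.TearDown succeeded (operation_generator.go:803, which also records the Outer/InnerVolumeSpecName mapping), then the "Volume detached" confirmations that follow just below (reconciler_common.go:293). The sketch after this paragraph cross-checks the two ends of that sequence; message shapes are assumed from the surrounding entries, and it keys on the volume name only, so the UniqueName would be needed to disambiguate identically named volumes across pods.

import re, sys

# Illustrative consistency check, not kubelet code.
UNMOUNT = re.compile(r'UnmountVolume started for volume \\"([^"\\]+)\\"')
DETACH = re.compile(r'Volume detached for volume \\"([^"\\]+)\\"')

def main(path):
    started, detached = set(), set()
    for line in open(path, errors="replace"):
        m = UNMOUNT.search(line)
        if m:
            started.add(m.group(1))
        m = DETACH.search(line)
        if m:
            detached.add(m.group(1))
    for vol in sorted(started - detached):
        print(f"unmount started but no 'Volume detached' seen: {vol}")

if __name__ == "__main__":
    main(sys.argv[1])
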
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:10:51 crc kubenswrapper[4838]: I0202 11:10:51.011077 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 02 11:10:51 crc kubenswrapper[4838]: I0202 11:10:51.024146 4838 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b9ae6407-ad21-4b11-9e2c-ebd7441c5e96-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 11:10:51 crc kubenswrapper[4838]: I0202 11:10:51.024198 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9ae6407-ad21-4b11-9e2c-ebd7441c5e96-config\") on node \"crc\" DevicePath \"\"" Feb 02 11:10:51 crc kubenswrapper[4838]: I0202 11:10:51.024216 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82d26ae8-23be-4861-9384-59cb3e20640d-config\") on node \"crc\" DevicePath \"\"" Feb 02 11:10:51 crc kubenswrapper[4838]: I0202 11:10:51.024226 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rhhp\" (UniqueName: \"kubernetes.io/projected/82d26ae8-23be-4861-9384-59cb3e20640d-kube-api-access-9rhhp\") on node \"crc\" DevicePath \"\"" Feb 02 11:10:51 crc kubenswrapper[4838]: I0202 11:10:51.024239 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nr7f7\" (UniqueName: \"kubernetes.io/projected/b9ae6407-ad21-4b11-9e2c-ebd7441c5e96-kube-api-access-nr7f7\") on node \"crc\" DevicePath \"\"" Feb 02 11:10:51 crc kubenswrapper[4838]: I0202 11:10:51.316366 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"605eae6b-dbaf-4781-97bb-2ef09397141d","Type":"ContainerStarted","Data":"f6a122c2f1700904cc891a79f61a7f3644c051dc958dfd3212a9cacfb7233562"} Feb 02 11:10:51 crc kubenswrapper[4838]: I0202 11:10:51.317859 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018","Type":"ContainerStarted","Data":"6fefd5136bb3256037ab8daf90001240201c864e202674e9d978317af2210178"} Feb 02 11:10:51 crc kubenswrapper[4838]: I0202 11:10:51.318774 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"039ec177-4229-482e-aeec-ec3db4349951","Type":"ContainerStarted","Data":"eb07b2c8863508ae875df9407933d8494ad49e5ca459ba7d09e2f977705d658e"} Feb 02 11:10:51 crc kubenswrapper[4838]: I0202 11:10:51.319703 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"10f55730-6ea0-4989-a006-b0549f5566a7","Type":"ContainerStarted","Data":"7626129137e1c6892f1a74181697a36e8ac860b4ad5264ef50f6178d6b185a66"} Feb 02 11:10:51 crc kubenswrapper[4838]: I0202 11:10:51.320944 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"698d5f5d-683c-4130-8d4f-d1d59b5d32e4","Type":"ContainerStarted","Data":"20eef4683ab1fdb6d7b507d1690c2129630d8517a6e47c040711b0e7bf93e35a"} Feb 02 11:10:51 crc kubenswrapper[4838]: I0202 11:10:51.321855 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"5b42feb4-a718-4036-be9e-3113b97680c4","Type":"ContainerStarted","Data":"28d1789021ae4355a55dcccd26b98a9be56621e0fa9fa2d1961f9cf901080658"} Feb 02 11:10:51 crc kubenswrapper[4838]: I0202 11:10:51.322686 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-5lnnj" 
event={"ID":"b9ae6407-ad21-4b11-9e2c-ebd7441c5e96","Type":"ContainerDied","Data":"0352ce175f716fd85672adb4928a150d1da6077d2abb4c17fb67b6d817130dc7"} Feb 02 11:10:51 crc kubenswrapper[4838]: I0202 11:10:51.322786 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-5lnnj" Feb 02 11:10:51 crc kubenswrapper[4838]: I0202 11:10:51.325836 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-92wp6" event={"ID":"82d26ae8-23be-4861-9384-59cb3e20640d","Type":"ContainerDied","Data":"565f98449556fad39e3d6f947cf9a70a31c94077dec3144d0af77068df5f0943"} Feb 02 11:10:51 crc kubenswrapper[4838]: I0202 11:10:51.325899 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-92wp6" Feb 02 11:10:51 crc kubenswrapper[4838]: I0202 11:10:51.332234 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"2f841e0c-b40f-4dd1-8427-ea07840bcdf6","Type":"ContainerStarted","Data":"6e6dbfad3ec25370f7b1b197290cf427027d5f4033d7e1df43dc1b15480bfb0f"} Feb 02 11:10:51 crc kubenswrapper[4838]: I0202 11:10:51.334157 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-78llm" event={"ID":"89d377c3-4929-47c4-abc7-53bb5e058025","Type":"ContainerStarted","Data":"6878750c9e19a6b2e60c488b07bf5d4585106b4735c1213c8ccff1dcec8d2f84"} Feb 02 11:10:51 crc kubenswrapper[4838]: I0202 11:10:51.394369 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-92wp6"] Feb 02 11:10:51 crc kubenswrapper[4838]: I0202 11:10:51.403180 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-92wp6"] Feb 02 11:10:51 crc kubenswrapper[4838]: E0202 11:10:51.411971 4838 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9ae6407_ad21_4b11_9e2c_ebd7441c5e96.slice\": RecentStats: unable to find data in memory cache]" Feb 02 11:10:51 crc kubenswrapper[4838]: I0202 11:10:51.443061 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-5lnnj"] Feb 02 11:10:51 crc kubenswrapper[4838]: I0202 11:10:51.447795 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-5lnnj"] Feb 02 11:10:51 crc kubenswrapper[4838]: I0202 11:10:51.617091 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-gmk9d"] Feb 02 11:10:52 crc kubenswrapper[4838]: I0202 11:10:52.343048 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-gmk9d" event={"ID":"6201c1a8-a058-4029-ac96-17f4500b9fc0","Type":"ContainerStarted","Data":"b851cbeb708be101946aabb1ad98f76c08d38790cc00e4d431416140f8dd70ca"} Feb 02 11:10:52 crc kubenswrapper[4838]: I0202 11:10:52.521199 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82d26ae8-23be-4861-9384-59cb3e20640d" path="/var/lib/kubelet/pods/82d26ae8-23be-4861-9384-59cb3e20640d/volumes" Feb 02 11:10:52 crc kubenswrapper[4838]: I0202 11:10:52.521557 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9ae6407-ad21-4b11-9e2c-ebd7441c5e96" path="/var/lib/kubelet/pods/b9ae6407-ad21-4b11-9e2c-ebd7441c5e96/volumes" Feb 02 11:10:52 crc kubenswrapper[4838]: W0202 11:10:52.596155 4838 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda8af70b4_152f_4edb_a4c5_afc8baed3685.slice/crio-c58680098e500c024052f22d6a406f3d9c1f2eac4dec91707cba67f656dd2efb WatchSource:0}: Error finding container c58680098e500c024052f22d6a406f3d9c1f2eac4dec91707cba67f656dd2efb: Status 404 returned error can't find the container with id c58680098e500c024052f22d6a406f3d9c1f2eac4dec91707cba67f656dd2efb Feb 02 11:10:52 crc kubenswrapper[4838]: I0202 11:10:52.596866 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 02 11:10:53 crc kubenswrapper[4838]: I0202 11:10:53.352584 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"a8af70b4-152f-4edb-a4c5-afc8baed3685","Type":"ContainerStarted","Data":"c58680098e500c024052f22d6a406f3d9c1f2eac4dec91707cba67f656dd2efb"} Feb 02 11:10:53 crc kubenswrapper[4838]: I0202 11:10:53.966158 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-p2lkw"] Feb 02 11:10:53 crc kubenswrapper[4838]: I0202 11:10:53.967666 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-p2lkw" Feb 02 11:10:53 crc kubenswrapper[4838]: I0202 11:10:53.970977 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Feb 02 11:10:53 crc kubenswrapper[4838]: I0202 11:10:53.990792 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-p2lkw"] Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.091129 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e-ovn-rundir\") pod \"ovn-controller-metrics-p2lkw\" (UID: \"1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e\") " pod="openstack/ovn-controller-metrics-p2lkw" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.091213 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e-combined-ca-bundle\") pod \"ovn-controller-metrics-p2lkw\" (UID: \"1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e\") " pod="openstack/ovn-controller-metrics-p2lkw" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.091385 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e-ovs-rundir\") pod \"ovn-controller-metrics-p2lkw\" (UID: \"1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e\") " pod="openstack/ovn-controller-metrics-p2lkw" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.091440 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e-config\") pod \"ovn-controller-metrics-p2lkw\" (UID: \"1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e\") " pod="openstack/ovn-controller-metrics-p2lkw" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.091575 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-p2lkw\" (UID: \"1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e\") " 
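
The W-level manager.go:1169 "Failed to process watch event ... Status 404" messages in this section are cAdvisor racing the container runtime: the new crio-<id> cgroup shows up via the filesystem watch before the container is registered, so the lookup 404s. Each one here is followed shortly by a ContainerStarted event carrying the same ID, which makes them benign noise; they would only warrant attention if no matching start ever arrived. Below is a cross-check along those lines, with the ID extraction assumed from the cgroup paths above; an illustrative sketch, not a cAdvisor interface.

import re, sys

# cgroup watch failures name the container as .../crio-<64 hex chars>
WATCH = re.compile(r'Failed to process watch event .*crio-([0-9a-f]{64})')
START = re.compile(r'"Type":"ContainerStarted","Data":"([0-9a-f]{64})"')

def main(path):
    failed, started = set(), set()
    for line in open(path, errors="replace"):
        for m in WATCH.finditer(line):
            failed.add(m.group(1))
        for m in START.finditer(line):
            started.add(m.group(1))
    for cid in sorted(failed - started):
        print(f"watch failed and no ContainerStarted seen: {cid[:12]}")

if __name__ == "__main__":
    main(sys.argv[1])
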
pod="openstack/ovn-controller-metrics-p2lkw" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.091681 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c74g7\" (UniqueName: \"kubernetes.io/projected/1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e-kube-api-access-c74g7\") pod \"ovn-controller-metrics-p2lkw\" (UID: \"1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e\") " pod="openstack/ovn-controller-metrics-p2lkw" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.121262 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-7d89g"] Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.157747 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-mtkxv"] Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.159259 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-mtkxv" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.164946 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.181016 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-mtkxv"] Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.192808 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e-combined-ca-bundle\") pod \"ovn-controller-metrics-p2lkw\" (UID: \"1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e\") " pod="openstack/ovn-controller-metrics-p2lkw" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.192861 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e-ovs-rundir\") pod \"ovn-controller-metrics-p2lkw\" (UID: \"1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e\") " pod="openstack/ovn-controller-metrics-p2lkw" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.192884 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e-config\") pod \"ovn-controller-metrics-p2lkw\" (UID: \"1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e\") " pod="openstack/ovn-controller-metrics-p2lkw" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.192928 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8ce7915-7907-4d58-a404-fb3633c7d792-config\") pod \"dnsmasq-dns-7fd796d7df-mtkxv\" (UID: \"e8ce7915-7907-4d58-a404-fb3633c7d792\") " pod="openstack/dnsmasq-dns-7fd796d7df-mtkxv" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.192948 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-p2lkw\" (UID: \"1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e\") " pod="openstack/ovn-controller-metrics-p2lkw" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.192979 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c74g7\" (UniqueName: \"kubernetes.io/projected/1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e-kube-api-access-c74g7\") pod \"ovn-controller-metrics-p2lkw\" 
(UID: \"1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e\") " pod="openstack/ovn-controller-metrics-p2lkw" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.193037 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e-ovn-rundir\") pod \"ovn-controller-metrics-p2lkw\" (UID: \"1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e\") " pod="openstack/ovn-controller-metrics-p2lkw" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.193057 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e8ce7915-7907-4d58-a404-fb3633c7d792-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-mtkxv\" (UID: \"e8ce7915-7907-4d58-a404-fb3633c7d792\") " pod="openstack/dnsmasq-dns-7fd796d7df-mtkxv" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.193077 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8ce7915-7907-4d58-a404-fb3633c7d792-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-mtkxv\" (UID: \"e8ce7915-7907-4d58-a404-fb3633c7d792\") " pod="openstack/dnsmasq-dns-7fd796d7df-mtkxv" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.193099 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzw45\" (UniqueName: \"kubernetes.io/projected/e8ce7915-7907-4d58-a404-fb3633c7d792-kube-api-access-zzw45\") pod \"dnsmasq-dns-7fd796d7df-mtkxv\" (UID: \"e8ce7915-7907-4d58-a404-fb3633c7d792\") " pod="openstack/dnsmasq-dns-7fd796d7df-mtkxv" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.193281 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e-ovn-rundir\") pod \"ovn-controller-metrics-p2lkw\" (UID: \"1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e\") " pod="openstack/ovn-controller-metrics-p2lkw" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.193279 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e-ovs-rundir\") pod \"ovn-controller-metrics-p2lkw\" (UID: \"1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e\") " pod="openstack/ovn-controller-metrics-p2lkw" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.194414 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e-config\") pod \"ovn-controller-metrics-p2lkw\" (UID: \"1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e\") " pod="openstack/ovn-controller-metrics-p2lkw" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.200111 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e-combined-ca-bundle\") pod \"ovn-controller-metrics-p2lkw\" (UID: \"1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e\") " pod="openstack/ovn-controller-metrics-p2lkw" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.201354 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-p2lkw\" (UID: \"1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e\") " 
pod="openstack/ovn-controller-metrics-p2lkw" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.220696 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c74g7\" (UniqueName: \"kubernetes.io/projected/1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e-kube-api-access-c74g7\") pod \"ovn-controller-metrics-p2lkw\" (UID: \"1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e\") " pod="openstack/ovn-controller-metrics-p2lkw" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.287405 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-kf66g"] Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.294841 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8ce7915-7907-4d58-a404-fb3633c7d792-config\") pod \"dnsmasq-dns-7fd796d7df-mtkxv\" (UID: \"e8ce7915-7907-4d58-a404-fb3633c7d792\") " pod="openstack/dnsmasq-dns-7fd796d7df-mtkxv" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.294922 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e8ce7915-7907-4d58-a404-fb3633c7d792-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-mtkxv\" (UID: \"e8ce7915-7907-4d58-a404-fb3633c7d792\") " pod="openstack/dnsmasq-dns-7fd796d7df-mtkxv" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.294944 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8ce7915-7907-4d58-a404-fb3633c7d792-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-mtkxv\" (UID: \"e8ce7915-7907-4d58-a404-fb3633c7d792\") " pod="openstack/dnsmasq-dns-7fd796d7df-mtkxv" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.294962 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzw45\" (UniqueName: \"kubernetes.io/projected/e8ce7915-7907-4d58-a404-fb3633c7d792-kube-api-access-zzw45\") pod \"dnsmasq-dns-7fd796d7df-mtkxv\" (UID: \"e8ce7915-7907-4d58-a404-fb3633c7d792\") " pod="openstack/dnsmasq-dns-7fd796d7df-mtkxv" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.295723 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-p2lkw" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.295957 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8ce7915-7907-4d58-a404-fb3633c7d792-config\") pod \"dnsmasq-dns-7fd796d7df-mtkxv\" (UID: \"e8ce7915-7907-4d58-a404-fb3633c7d792\") " pod="openstack/dnsmasq-dns-7fd796d7df-mtkxv" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.296123 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e8ce7915-7907-4d58-a404-fb3633c7d792-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-mtkxv\" (UID: \"e8ce7915-7907-4d58-a404-fb3633c7d792\") " pod="openstack/dnsmasq-dns-7fd796d7df-mtkxv" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.296248 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8ce7915-7907-4d58-a404-fb3633c7d792-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-mtkxv\" (UID: \"e8ce7915-7907-4d58-a404-fb3633c7d792\") " pod="openstack/dnsmasq-dns-7fd796d7df-mtkxv" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.325513 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzw45\" (UniqueName: \"kubernetes.io/projected/e8ce7915-7907-4d58-a404-fb3633c7d792-kube-api-access-zzw45\") pod \"dnsmasq-dns-7fd796d7df-mtkxv\" (UID: \"e8ce7915-7907-4d58-a404-fb3633c7d792\") " pod="openstack/dnsmasq-dns-7fd796d7df-mtkxv" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.332848 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-zhs6w"] Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.343878 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-zhs6w"] Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.344422 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-zhs6w" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.348230 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.396850 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a321e834-3079-4d1d-b223-08c4ce184d81-config\") pod \"dnsmasq-dns-86db49b7ff-zhs6w\" (UID: \"a321e834-3079-4d1d-b223-08c4ce184d81\") " pod="openstack/dnsmasq-dns-86db49b7ff-zhs6w" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.396905 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a321e834-3079-4d1d-b223-08c4ce184d81-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-zhs6w\" (UID: \"a321e834-3079-4d1d-b223-08c4ce184d81\") " pod="openstack/dnsmasq-dns-86db49b7ff-zhs6w" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.396961 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a321e834-3079-4d1d-b223-08c4ce184d81-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-zhs6w\" (UID: \"a321e834-3079-4d1d-b223-08c4ce184d81\") " pod="openstack/dnsmasq-dns-86db49b7ff-zhs6w" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.397065 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6q6jl\" (UniqueName: \"kubernetes.io/projected/a321e834-3079-4d1d-b223-08c4ce184d81-kube-api-access-6q6jl\") pod \"dnsmasq-dns-86db49b7ff-zhs6w\" (UID: \"a321e834-3079-4d1d-b223-08c4ce184d81\") " pod="openstack/dnsmasq-dns-86db49b7ff-zhs6w" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.397115 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a321e834-3079-4d1d-b223-08c4ce184d81-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-zhs6w\" (UID: \"a321e834-3079-4d1d-b223-08c4ce184d81\") " pod="openstack/dnsmasq-dns-86db49b7ff-zhs6w" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.475841 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-mtkxv" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.498281 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6q6jl\" (UniqueName: \"kubernetes.io/projected/a321e834-3079-4d1d-b223-08c4ce184d81-kube-api-access-6q6jl\") pod \"dnsmasq-dns-86db49b7ff-zhs6w\" (UID: \"a321e834-3079-4d1d-b223-08c4ce184d81\") " pod="openstack/dnsmasq-dns-86db49b7ff-zhs6w" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.498368 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a321e834-3079-4d1d-b223-08c4ce184d81-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-zhs6w\" (UID: \"a321e834-3079-4d1d-b223-08c4ce184d81\") " pod="openstack/dnsmasq-dns-86db49b7ff-zhs6w" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.498448 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a321e834-3079-4d1d-b223-08c4ce184d81-config\") pod \"dnsmasq-dns-86db49b7ff-zhs6w\" (UID: \"a321e834-3079-4d1d-b223-08c4ce184d81\") " pod="openstack/dnsmasq-dns-86db49b7ff-zhs6w" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.498471 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a321e834-3079-4d1d-b223-08c4ce184d81-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-zhs6w\" (UID: \"a321e834-3079-4d1d-b223-08c4ce184d81\") " pod="openstack/dnsmasq-dns-86db49b7ff-zhs6w" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.498528 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a321e834-3079-4d1d-b223-08c4ce184d81-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-zhs6w\" (UID: \"a321e834-3079-4d1d-b223-08c4ce184d81\") " pod="openstack/dnsmasq-dns-86db49b7ff-zhs6w" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.499746 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a321e834-3079-4d1d-b223-08c4ce184d81-config\") pod \"dnsmasq-dns-86db49b7ff-zhs6w\" (UID: \"a321e834-3079-4d1d-b223-08c4ce184d81\") " pod="openstack/dnsmasq-dns-86db49b7ff-zhs6w" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.499796 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a321e834-3079-4d1d-b223-08c4ce184d81-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-zhs6w\" (UID: \"a321e834-3079-4d1d-b223-08c4ce184d81\") " pod="openstack/dnsmasq-dns-86db49b7ff-zhs6w" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.499798 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a321e834-3079-4d1d-b223-08c4ce184d81-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-zhs6w\" (UID: \"a321e834-3079-4d1d-b223-08c4ce184d81\") " pod="openstack/dnsmasq-dns-86db49b7ff-zhs6w" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.499974 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a321e834-3079-4d1d-b223-08c4ce184d81-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-zhs6w\" (UID: \"a321e834-3079-4d1d-b223-08c4ce184d81\") " pod="openstack/dnsmasq-dns-86db49b7ff-zhs6w" Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 
Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.598680 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-p2lkw"]
Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.685460 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-zhs6w"
Feb 02 11:10:54 crc kubenswrapper[4838]: I0202 11:10:54.928750 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-mtkxv"]
Feb 02 11:10:54 crc kubenswrapper[4838]: W0202 11:10:54.936108 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8ce7915_7907_4d58_a404_fb3633c7d792.slice/crio-26cf5a03efbed326fe84adbb17bfb34e060a285b0c2c28d79d92f9f03cd3006c WatchSource:0}: Error finding container 26cf5a03efbed326fe84adbb17bfb34e060a285b0c2c28d79d92f9f03cd3006c: Status 404 returned error can't find the container with id 26cf5a03efbed326fe84adbb17bfb34e060a285b0c2c28d79d92f9f03cd3006c
Feb 02 11:10:55 crc kubenswrapper[4838]: I0202 11:10:55.147162 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-zhs6w"]
Feb 02 11:10:55 crc kubenswrapper[4838]: W0202 11:10:55.148680 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda321e834_3079_4d1d_b223_08c4ce184d81.slice/crio-40d34f35eb847980e2e2aa065aa4a4ead3c36b14ba16e51446d6dcf2b664763a WatchSource:0}: Error finding container 40d34f35eb847980e2e2aa065aa4a4ead3c36b14ba16e51446d6dcf2b664763a: Status 404 returned error can't find the container with id 40d34f35eb847980e2e2aa065aa4a4ead3c36b14ba16e51446d6dcf2b664763a
Feb 02 11:10:55 crc kubenswrapper[4838]: I0202 11:10:55.381636 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-zhs6w" event={"ID":"a321e834-3079-4d1d-b223-08c4ce184d81","Type":"ContainerStarted","Data":"40d34f35eb847980e2e2aa065aa4a4ead3c36b14ba16e51446d6dcf2b664763a"}
Feb 02 11:10:55 crc kubenswrapper[4838]: I0202 11:10:55.383214 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-p2lkw" event={"ID":"1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e","Type":"ContainerStarted","Data":"ba43aed08bd20a50aa6175bfed06200ed23e1c7f6bb26feea16b577113d475ce"}
Feb 02 11:10:55 crc kubenswrapper[4838]: I0202 11:10:55.384564 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-mtkxv" event={"ID":"e8ce7915-7907-4d58-a404-fb3633c7d792","Type":"ContainerStarted","Data":"26cf5a03efbed326fe84adbb17bfb34e060a285b0c2c28d79d92f9f03cd3006c"}
Feb 02 11:11:08 crc kubenswrapper[4838]: E0202 11:11:08.036553 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0"
Feb 02 11:11:08 crc kubenswrapper[4838]: E0202 11:11:08.037049 4838 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0"
Feb 02 11:11:08 crc kubenswrapper[4838]: E0202 11:11:08.037183 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-state-metrics,Image:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,Command:[],Args:[--resources=pods --namespaces=openstack],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http-metrics,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},ContainerPort{Name:telemetry,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5w7jg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{0 8080 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod kube-state-metrics-0_openstack(039ec177-4229-482e-aeec-ec3db4349951): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Feb 02 11:11:08 crc kubenswrapper[4838]: E0202 11:11:08.038270 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/kube-state-metrics-0" podUID="039ec177-4229-482e-aeec-ec3db4349951"
Feb 02 11:11:08 crc kubenswrapper[4838]: I0202 11:11:08.473953 4838 generic.go:334] "Generic (PLEG): container finished" podID="6ef67aca-5d1e-42eb-a108-ed5db869c6fe" containerID="fa29acb541f2a74523e139e625de953486480b10678b987cf334bb1553ecaa45" exitCode=0
Feb 02 11:11:08 crc kubenswrapper[4838]: I0202 11:11:08.474276 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-kf66g" event={"ID":"6ef67aca-5d1e-42eb-a108-ed5db869c6fe","Type":"ContainerDied","Data":"fa29acb541f2a74523e139e625de953486480b10678b987cf334bb1553ecaa45"}
Feb 02 11:11:08 crc kubenswrapper[4838]: E0202 11:11:08.475742 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0\\\"\"" pod="openstack/kube-state-metrics-0" podUID="039ec177-4229-482e-aeec-ec3db4349951"
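The canceled pull above surfaces as two distinct pod errors: the sync that attempted the pull fails with ErrImagePull, and the following sync, landing inside the per-image backoff window, is rejected with ImagePullBackOff before any new pull is attempted. A sketch of that two-state behavior, assuming a 10s initial delay doubling to a 5m cap (typical kubelet defaults; the exact values are not visible in this log):

    package main

    import (
        "errors"
        "fmt"
        "time"
    )

    // Illustrative only: one failed pull opens a backoff window for the
    // image; syncs inside the window report ImagePullBackOff instead of
    // retrying the pull, and each failure doubles the window.
    var errPullCanceled = errors.New("rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled")

    type imageBackoff struct {
        until map[string]time.Time
        delay map[string]time.Duration
    }

    func (b *imageBackoff) syncImage(image string, pull func() error) error {
        if time.Now().Before(b.until[image]) {
            return fmt.Errorf("ImagePullBackOff: back-off pulling image %q", image)
        }
        if err := pull(); err != nil {
            d := b.delay[image]
            switch {
            case d == 0:
                d = 10 * time.Second // assumed initial delay
            case d < 5*time.Minute:
                d *= 2 // assumed doubling, capped
            }
            b.delay[image] = d
            b.until[image] = time.Now().Add(d)
            return fmt.Errorf("ErrImagePull: %w", err)
        }
        return nil
    }

    func main() {
        b := &imageBackoff{until: map[string]time.Time{}, delay: map[string]time.Duration{}}
        img := "registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0"
        fmt.Println(b.syncImage(img, func() error { return errPullCanceled })) // ErrImagePull
        fmt.Println(b.syncImage(img, func() error { return errPullCanceled })) // ImagePullBackOff
    }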
\"kube-state-metrics\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0\\\"\"" pod="openstack/kube-state-metrics-0" podUID="039ec177-4229-482e-aeec-ec3db4349951" Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.270228 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-kf66g" Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.359762 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6ef67aca-5d1e-42eb-a108-ed5db869c6fe-dns-svc\") pod \"6ef67aca-5d1e-42eb-a108-ed5db869c6fe\" (UID: \"6ef67aca-5d1e-42eb-a108-ed5db869c6fe\") " Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.359844 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ef67aca-5d1e-42eb-a108-ed5db869c6fe-config\") pod \"6ef67aca-5d1e-42eb-a108-ed5db869c6fe\" (UID: \"6ef67aca-5d1e-42eb-a108-ed5db869c6fe\") " Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.359920 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxvs8\" (UniqueName: \"kubernetes.io/projected/6ef67aca-5d1e-42eb-a108-ed5db869c6fe-kube-api-access-wxvs8\") pod \"6ef67aca-5d1e-42eb-a108-ed5db869c6fe\" (UID: \"6ef67aca-5d1e-42eb-a108-ed5db869c6fe\") " Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.364935 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ef67aca-5d1e-42eb-a108-ed5db869c6fe-kube-api-access-wxvs8" (OuterVolumeSpecName: "kube-api-access-wxvs8") pod "6ef67aca-5d1e-42eb-a108-ed5db869c6fe" (UID: "6ef67aca-5d1e-42eb-a108-ed5db869c6fe"). InnerVolumeSpecName "kube-api-access-wxvs8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.392812 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ef67aca-5d1e-42eb-a108-ed5db869c6fe-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6ef67aca-5d1e-42eb-a108-ed5db869c6fe" (UID: "6ef67aca-5d1e-42eb-a108-ed5db869c6fe"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.402161 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ef67aca-5d1e-42eb-a108-ed5db869c6fe-config" (OuterVolumeSpecName: "config") pod "6ef67aca-5d1e-42eb-a108-ed5db869c6fe" (UID: "6ef67aca-5d1e-42eb-a108-ed5db869c6fe"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.461685 4838 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6ef67aca-5d1e-42eb-a108-ed5db869c6fe-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.461715 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ef67aca-5d1e-42eb-a108-ed5db869c6fe-config\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.461725 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxvs8\" (UniqueName: \"kubernetes.io/projected/6ef67aca-5d1e-42eb-a108-ed5db869c6fe-kube-api-access-wxvs8\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.483116 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-p2lkw" event={"ID":"1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e","Type":"ContainerStarted","Data":"9b9459f533dfc125b5cf931090624401f96cd49753e5c2af29f5e6191727a06a"} Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.485658 4838 generic.go:334] "Generic (PLEG): container finished" podID="e8ce7915-7907-4d58-a404-fb3633c7d792" containerID="30a6660035cd2b143ecec5b30fa6e1b14f5de333734e58c6058a748f4867225a" exitCode=0 Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.485728 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-mtkxv" event={"ID":"e8ce7915-7907-4d58-a404-fb3633c7d792","Type":"ContainerDied","Data":"30a6660035cd2b143ecec5b30fa6e1b14f5de333734e58c6058a748f4867225a"} Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.490190 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"5b42feb4-a718-4036-be9e-3113b97680c4","Type":"ContainerStarted","Data":"1dcf85704ee405c4bffca6271f136c38b680e274bff77a0d0df5403b56ec2f6e"} Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.492229 4838 generic.go:334] "Generic (PLEG): container finished" podID="093bf62e-0c85-4d63-8dd4-4003b7f20122" containerID="ef2735de285713f8876b0da412e7261ddc3201a30b9563465cfbdb9643488c6e" exitCode=0 Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.492286 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-7d89g" event={"ID":"093bf62e-0c85-4d63-8dd4-4003b7f20122","Type":"ContainerDied","Data":"ef2735de285713f8876b0da412e7261ddc3201a30b9563465cfbdb9643488c6e"} Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.500475 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-kf66g" event={"ID":"6ef67aca-5d1e-42eb-a108-ed5db869c6fe","Type":"ContainerDied","Data":"d418ecc3121b2943b1cfb458db2c3973d3f5598a2ca240e347c7c2e2d56290c7"} Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.500534 4838 scope.go:117] "RemoveContainer" containerID="fa29acb541f2a74523e139e625de953486480b10678b987cf334bb1553ecaa45" Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.501364 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-kf66g" Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.508824 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-p2lkw" podStartSLOduration=4.118940072 podStartE2EDuration="16.508800819s" podCreationTimestamp="2026-02-02 11:10:53 +0000 UTC" firstStartedPulling="2026-02-02 11:10:54.605794655 +0000 UTC m=+1048.942895683" lastFinishedPulling="2026-02-02 11:11:06.995655402 +0000 UTC m=+1061.332756430" observedRunningTime="2026-02-02 11:11:09.498202729 +0000 UTC m=+1063.835303767" watchObservedRunningTime="2026-02-02 11:11:09.508800819 +0000 UTC m=+1063.845901857" Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.509536 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"a8af70b4-152f-4edb-a4c5-afc8baed3685","Type":"ContainerStarted","Data":"a7a852e2906446a41499efa5d45b3ebd26f4a23ce8277e6db638242d06971cff"} Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.537113 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"605eae6b-dbaf-4781-97bb-2ef09397141d","Type":"ContainerStarted","Data":"91ffc28088c413a1318e605fd3ad4275fa6ac8b939a37a09ee92317b15e3c72d"} Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.537605 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.582087 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"698d5f5d-683c-4130-8d4f-d1d59b5d32e4","Type":"ContainerStarted","Data":"bac490b36b57a0475b02c67e445444a2d8f9bcdd6b9fb515df62f3f92e6d09e1"} Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.599159 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"2f841e0c-b40f-4dd1-8427-ea07840bcdf6","Type":"ContainerStarted","Data":"4559f58537a048e41e86d82ebad76f6ae2906933c526366fb7e6fbecc7ebc124"} Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.617188 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=14.157722931 podStartE2EDuration="29.617164766s" podCreationTimestamp="2026-02-02 11:10:40 +0000 UTC" firstStartedPulling="2026-02-02 11:10:50.885828957 +0000 UTC m=+1045.222929985" lastFinishedPulling="2026-02-02 11:11:06.345270792 +0000 UTC m=+1060.682371820" observedRunningTime="2026-02-02 11:11:09.604144203 +0000 UTC m=+1063.941245241" watchObservedRunningTime="2026-02-02 11:11:09.617164766 +0000 UTC m=+1063.954265784" Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.767200 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-kf66g"] Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.772941 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-kf66g"] Feb 02 11:11:09 crc kubenswrapper[4838]: I0202 11:11:09.988504 4838 util.go:48] "No ready sandbox for pod can be found. 
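The startup-latency entries above are internally consistent: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is that end-to-end duration minus the image-pull window (lastFinishedPulling minus firstStartedPulling), so registry speed does not count against the SLO. Rechecking the ovn-controller-metrics-p2lkw entry:

    package main

    import (
        "fmt"
        "time"
    )

    // Recomputes the logged numbers: 16.508800819s end to end, of which
    // 12.389860747s was spent pulling images, leaving an SLO duration of
    // 4.118940072s, exactly as reported by pod_startup_latency_tracker.
    func main() {
        const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
        parse := func(s string) time.Time {
            t, err := time.Parse(layout, s)
            if err != nil {
                panic(err)
            }
            return t
        }
        created := parse("2026-02-02 11:10:53 +0000 UTC")
        firstPull := parse("2026-02-02 11:10:54.605794655 +0000 UTC")
        lastPull := parse("2026-02-02 11:11:06.995655402 +0000 UTC")
        watched := parse("2026-02-02 11:11:09.508800819 +0000 UTC")

        e2e := watched.Sub(created)          // 16.508800819s
        slo := e2e - lastPull.Sub(firstPull) // 16.508800819s - 12.389860747s = 4.118940072s
        fmt.Println(e2e, slo)
    }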
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-7d89g"
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.073359 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/093bf62e-0c85-4d63-8dd4-4003b7f20122-config\") pod \"093bf62e-0c85-4d63-8dd4-4003b7f20122\" (UID: \"093bf62e-0c85-4d63-8dd4-4003b7f20122\") "
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.073427 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n8pkv\" (UniqueName: \"kubernetes.io/projected/093bf62e-0c85-4d63-8dd4-4003b7f20122-kube-api-access-n8pkv\") pod \"093bf62e-0c85-4d63-8dd4-4003b7f20122\" (UID: \"093bf62e-0c85-4d63-8dd4-4003b7f20122\") "
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.073532 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/093bf62e-0c85-4d63-8dd4-4003b7f20122-dns-svc\") pod \"093bf62e-0c85-4d63-8dd4-4003b7f20122\" (UID: \"093bf62e-0c85-4d63-8dd4-4003b7f20122\") "
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.077532 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/093bf62e-0c85-4d63-8dd4-4003b7f20122-kube-api-access-n8pkv" (OuterVolumeSpecName: "kube-api-access-n8pkv") pod "093bf62e-0c85-4d63-8dd4-4003b7f20122" (UID: "093bf62e-0c85-4d63-8dd4-4003b7f20122"). InnerVolumeSpecName "kube-api-access-n8pkv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.092517 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/093bf62e-0c85-4d63-8dd4-4003b7f20122-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "093bf62e-0c85-4d63-8dd4-4003b7f20122" (UID: "093bf62e-0c85-4d63-8dd4-4003b7f20122"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.093727 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/093bf62e-0c85-4d63-8dd4-4003b7f20122-config" (OuterVolumeSpecName: "config") pod "093bf62e-0c85-4d63-8dd4-4003b7f20122" (UID: "093bf62e-0c85-4d63-8dd4-4003b7f20122"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.175497 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/093bf62e-0c85-4d63-8dd4-4003b7f20122-config\") on node \"crc\" DevicePath \"\""
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.175530 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n8pkv\" (UniqueName: \"kubernetes.io/projected/093bf62e-0c85-4d63-8dd4-4003b7f20122-kube-api-access-n8pkv\") on node \"crc\" DevicePath \"\""
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.175540 4838 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/093bf62e-0c85-4d63-8dd4-4003b7f20122-dns-svc\") on node \"crc\" DevicePath \"\""
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.519890 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ef67aca-5d1e-42eb-a108-ed5db869c6fe" path="/var/lib/kubelet/pods/6ef67aca-5d1e-42eb-a108-ed5db869c6fe/volumes"
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.608868 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-mtkxv" event={"ID":"e8ce7915-7907-4d58-a404-fb3633c7d792","Type":"ContainerStarted","Data":"993b93ff19731716cd1684da8b490e9f73bb6066da04476a8e7010103af6b603"}
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.609006 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7fd796d7df-mtkxv"
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.612003 4838 generic.go:334] "Generic (PLEG): container finished" podID="a321e834-3079-4d1d-b223-08c4ce184d81" containerID="338900de346508d9b483091abf67b1b8929c00ba58902bff130a5ad5765bfd93" exitCode=0
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.612042 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-zhs6w" event={"ID":"a321e834-3079-4d1d-b223-08c4ce184d81","Type":"ContainerDied","Data":"338900de346508d9b483091abf67b1b8929c00ba58902bff130a5ad5765bfd93"}
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.615037 4838 generic.go:334] "Generic (PLEG): container finished" podID="6201c1a8-a058-4029-ac96-17f4500b9fc0" containerID="b81842575f29582a987b17061a98c999284dccb8b17ea815f0629750a6c8614f" exitCode=0
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.615187 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-gmk9d" event={"ID":"6201c1a8-a058-4029-ac96-17f4500b9fc0","Type":"ContainerDied","Data":"b81842575f29582a987b17061a98c999284dccb8b17ea815f0629750a6c8614f"}
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.618359 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-7d89g"
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.618742 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-7d89g" event={"ID":"093bf62e-0c85-4d63-8dd4-4003b7f20122","Type":"ContainerDied","Data":"73524f146fe6f7958e3cdad95c2b9b0689e52376da965733b2fb01e12f423961"}
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.618786 4838 scope.go:117] "RemoveContainer" containerID="ef2735de285713f8876b0da412e7261ddc3201a30b9563465cfbdb9643488c6e"
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.627798 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018","Type":"ContainerStarted","Data":"9757444c3c0487546551824c79f785e6f58d72aac3126406d2b6b64ecc8b78ab"}
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.627852 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"bbb8b7a6-28d0-40fa-bdcb-fe95357c8018","Type":"ContainerStarted","Data":"414b7bd2006359cc63df8f5326e8c3f7eb98b5c12af743ecc7d649798ba884c6"}
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.636513 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"a8af70b4-152f-4edb-a4c5-afc8baed3685","Type":"ContainerStarted","Data":"a464f559f95b38e7e589aff7830b8765728edce4d150595e2daa07925e4794fa"}
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.648707 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"10f55730-6ea0-4989-a006-b0549f5566a7","Type":"ContainerStarted","Data":"70f80cc3e845e9e6389966fa131ac34e28094b7a7010f3ba63b7ea5f71fcac55"}
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.655266 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7fd796d7df-mtkxv" podStartSLOduration=4.499143195 podStartE2EDuration="16.655246958s" podCreationTimestamp="2026-02-02 11:10:54 +0000 UTC" firstStartedPulling="2026-02-02 11:10:54.938637302 +0000 UTC m=+1049.275738330" lastFinishedPulling="2026-02-02 11:11:07.094741065 +0000 UTC m=+1061.431842093" observedRunningTime="2026-02-02 11:11:10.646096957 +0000 UTC m=+1064.983198015" watchObservedRunningTime="2026-02-02 11:11:10.655246958 +0000 UTC m=+1064.992347996"
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.661916 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-78llm" event={"ID":"89d377c3-4929-47c4-abc7-53bb5e058025","Type":"ContainerStarted","Data":"1991568f7e61ac231c980f8398e5c49e6b5b1446b3e0267fad6d74a74e0e52ea"}
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.662020 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-78llm"
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.689324 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=9.636565096 podStartE2EDuration="25.689305556s" podCreationTimestamp="2026-02-02 11:10:45 +0000 UTC" firstStartedPulling="2026-02-02 11:10:51.032966807 +0000 UTC m=+1045.370067845" lastFinishedPulling="2026-02-02 11:11:07.085707277 +0000 UTC m=+1061.422808305" observedRunningTime="2026-02-02 11:11:10.681545582 +0000 UTC m=+1065.018646640" watchObservedRunningTime="2026-02-02 11:11:10.689305556 +0000 UTC m=+1065.026406584"
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.762412 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=8.151908064 podStartE2EDuration="22.762395994s" podCreationTimestamp="2026-02-02 11:10:48 +0000 UTC" firstStartedPulling="2026-02-02 11:10:52.598309632 +0000 UTC m=+1046.935410670" lastFinishedPulling="2026-02-02 11:11:07.208797562 +0000 UTC m=+1061.545898600" observedRunningTime="2026-02-02 11:11:10.761857679 +0000 UTC m=+1065.098958727" watchObservedRunningTime="2026-02-02 11:11:10.762395994 +0000 UTC m=+1065.099497022"
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.827057 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-78llm" podStartSLOduration=9.371024505 podStartE2EDuration="25.827039658s" podCreationTimestamp="2026-02-02 11:10:45 +0000 UTC" firstStartedPulling="2026-02-02 11:10:50.880398594 +0000 UTC m=+1045.217499622" lastFinishedPulling="2026-02-02 11:11:07.336413747 +0000 UTC m=+1061.673514775" observedRunningTime="2026-02-02 11:11:10.814466197 +0000 UTC m=+1065.151567245" watchObservedRunningTime="2026-02-02 11:11:10.827039658 +0000 UTC m=+1065.164140686"
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.851214 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-7d89g"]
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.857807 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-7d89g"]
Feb 02 11:11:10 crc kubenswrapper[4838]: I0202 11:11:10.899032 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0"
Feb 02 11:11:11 crc kubenswrapper[4838]: I0202 11:11:11.247661 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0"
Feb 02 11:11:11 crc kubenswrapper[4838]: I0202 11:11:11.297270 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0"
Feb 02 11:11:11 crc kubenswrapper[4838]: I0202 11:11:11.670658 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-gmk9d" event={"ID":"6201c1a8-a058-4029-ac96-17f4500b9fc0","Type":"ContainerStarted","Data":"e0cbf9e52000c81a24f23929df73940e4925ee053aeb82cf7b217fb0c8b4d088"}
Feb 02 11:11:11 crc kubenswrapper[4838]: I0202 11:11:11.670713 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-gmk9d" event={"ID":"6201c1a8-a058-4029-ac96-17f4500b9fc0","Type":"ContainerStarted","Data":"88ef465b43671e08274def09121d7bbae405546590415d8715a6717571fc3cee"}
Feb 02 11:11:11 crc kubenswrapper[4838]: I0202 11:11:11.672231 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-gmk9d"
Feb 02 11:11:11 crc kubenswrapper[4838]: I0202 11:11:11.672276 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-gmk9d"
Feb 02 11:11:11 crc kubenswrapper[4838]: I0202 11:11:11.677712 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-zhs6w" event={"ID":"a321e834-3079-4d1d-b223-08c4ce184d81","Type":"ContainerStarted","Data":"0fb71b06c761edfcd0b4055c79cd9d1bd826ac3c32468a35a80a1df6b132818b"}
Feb 02 11:11:11 crc kubenswrapper[4838]: I0202 11:11:11.678006 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86db49b7ff-zhs6w"
Feb 02 11:11:11 crc kubenswrapper[4838]: I0202 11:11:11.680086 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0"
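Above, ovsdbserver-nb-0 and ovsdbserver-sb-0 each report probe="startup" status="unhealthy" before flipping to "started", and only afterwards do readiness results for those pods appear: a startup probe holds off the other probes until it succeeds. A toy sketch of that gating (hypothetical types, not kubelet's prober):

    package main

    import "fmt"

    // While a container's startup probe has not succeeded, readiness is
    // not evaluated; once startup reports "started", readiness checks run.
    type prober struct {
        started bool
    }

    func (p *prober) runStartup(ok bool) {
        if ok {
            p.started = true
            fmt.Println(`probe="startup" status="started"`)
            return
        }
        fmt.Println(`probe="startup" status="unhealthy"`)
    }

    func (p *prober) runReadiness(ok bool) {
        if !p.started {
            return // suppressed until the startup probe succeeds
        }
        if ok {
            fmt.Println(`probe="readiness" status="ready"`)
            return
        }
        fmt.Println(`probe="readiness" status=""`)
    }

    func main() {
        p := &prober{}
        p.runStartup(false)  // unhealthy, as for ovsdbserver-nb-0 at 11:11:10
        p.runReadiness(true) // no output: readiness gated by startup
        p.runStartup(true)   // started
        p.runReadiness(true) // ready, as logged at 11:11:14
    }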
Feb 02 11:11:11 crc kubenswrapper[4838]: I0202 11:11:11.694947 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-gmk9d" podStartSLOduration=11.318420723 podStartE2EDuration="26.694928182s" podCreationTimestamp="2026-02-02 11:10:45 +0000 UTC" firstStartedPulling="2026-02-02 11:10:51.622380178 +0000 UTC m=+1045.959481206" lastFinishedPulling="2026-02-02 11:11:06.998887637 +0000 UTC m=+1061.335988665" observedRunningTime="2026-02-02 11:11:11.69373184 +0000 UTC m=+1066.030832928" watchObservedRunningTime="2026-02-02 11:11:11.694928182 +0000 UTC m=+1066.032029210"
Feb 02 11:11:11 crc kubenswrapper[4838]: I0202 11:11:11.736761 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86db49b7ff-zhs6w" podStartSLOduration=4.863169535 podStartE2EDuration="17.736741075s" podCreationTimestamp="2026-02-02 11:10:54 +0000 UTC" firstStartedPulling="2026-02-02 11:10:55.159250749 +0000 UTC m=+1049.496351777" lastFinishedPulling="2026-02-02 11:11:08.032822289 +0000 UTC m=+1062.369923317" observedRunningTime="2026-02-02 11:11:11.722083088 +0000 UTC m=+1066.059184136" watchObservedRunningTime="2026-02-02 11:11:11.736741075 +0000 UTC m=+1066.073842093"
Feb 02 11:11:11 crc kubenswrapper[4838]: I0202 11:11:11.898910 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0"
Feb 02 11:11:12 crc kubenswrapper[4838]: I0202 11:11:12.515455 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="093bf62e-0c85-4d63-8dd4-4003b7f20122" path="/var/lib/kubelet/pods/093bf62e-0c85-4d63-8dd4-4003b7f20122/volumes"
Feb 02 11:11:13 crc kubenswrapper[4838]: I0202 11:11:13.936786 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0"
Feb 02 11:11:14 crc kubenswrapper[4838]: I0202 11:11:14.478863 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7fd796d7df-mtkxv"
Feb 02 11:11:14 crc kubenswrapper[4838]: I0202 11:11:14.708416 4838 generic.go:334] "Generic (PLEG): container finished" podID="2f841e0c-b40f-4dd1-8427-ea07840bcdf6" containerID="4559f58537a048e41e86d82ebad76f6ae2906933c526366fb7e6fbecc7ebc124" exitCode=0
Feb 02 11:11:14 crc kubenswrapper[4838]: I0202 11:11:14.708486 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"2f841e0c-b40f-4dd1-8427-ea07840bcdf6","Type":"ContainerDied","Data":"4559f58537a048e41e86d82ebad76f6ae2906933c526366fb7e6fbecc7ebc124"}
Feb 02 11:11:14 crc kubenswrapper[4838]: I0202 11:11:14.711257 4838 generic.go:334] "Generic (PLEG): container finished" podID="5b42feb4-a718-4036-be9e-3113b97680c4" containerID="1dcf85704ee405c4bffca6271f136c38b680e274bff77a0d0df5403b56ec2f6e" exitCode=0
Feb 02 11:11:14 crc kubenswrapper[4838]: I0202 11:11:14.712077 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"5b42feb4-a718-4036-be9e-3113b97680c4","Type":"ContainerDied","Data":"1dcf85704ee405c4bffca6271f136c38b680e274bff77a0d0df5403b56ec2f6e"}
Feb 02 11:11:14 crc kubenswrapper[4838]: I0202 11:11:14.771123 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0"
Feb 02 11:11:15 crc kubenswrapper[4838]: I0202 11:11:15.279283 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0"
Feb 02 11:11:15 crc kubenswrapper[4838]: I0202 11:11:15.529412 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
Feb 02 11:11:15 crc kubenswrapper[4838]: E0202 11:11:15.529732 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ef67aca-5d1e-42eb-a108-ed5db869c6fe" containerName="init"
Feb 02 11:11:15 crc kubenswrapper[4838]: I0202 11:11:15.529748 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ef67aca-5d1e-42eb-a108-ed5db869c6fe" containerName="init"
Feb 02 11:11:15 crc kubenswrapper[4838]: E0202 11:11:15.529770 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="093bf62e-0c85-4d63-8dd4-4003b7f20122" containerName="init"
Feb 02 11:11:15 crc kubenswrapper[4838]: I0202 11:11:15.529776 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="093bf62e-0c85-4d63-8dd4-4003b7f20122" containerName="init"
Feb 02 11:11:15 crc kubenswrapper[4838]: I0202 11:11:15.529910 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="093bf62e-0c85-4d63-8dd4-4003b7f20122" containerName="init"
Feb 02 11:11:15 crc kubenswrapper[4838]: I0202 11:11:15.529920 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ef67aca-5d1e-42eb-a108-ed5db869c6fe" containerName="init"
Feb 02 11:11:15 crc kubenswrapper[4838]: I0202 11:11:15.530602 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Feb 02 11:11:15 crc kubenswrapper[4838]: I0202 11:11:15.536068 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-jxs6b"
Feb 02 11:11:15 crc kubenswrapper[4838]: I0202 11:11:15.536203 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Feb 02 11:11:15 crc kubenswrapper[4838]: I0202 11:11:15.536301 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Feb 02 11:11:15 crc kubenswrapper[4838]: I0202 11:11:15.536391 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs"
Feb 02 11:11:15 crc kubenswrapper[4838]: I0202 11:11:15.549645 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.001954 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e6a9dbb-63ef-4cf2-b725-254ad752937d-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"6e6a9dbb-63ef-4cf2-b725-254ad752937d\") " pod="openstack/ovn-northd-0"
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.002219 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e6a9dbb-63ef-4cf2-b725-254ad752937d-config\") pod \"ovn-northd-0\" (UID: \"6e6a9dbb-63ef-4cf2-b725-254ad752937d\") " pod="openstack/ovn-northd-0"
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.002264 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e6a9dbb-63ef-4cf2-b725-254ad752937d-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"6e6a9dbb-63ef-4cf2-b725-254ad752937d\") " pod="openstack/ovn-northd-0"
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.002311 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e6a9dbb-63ef-4cf2-b725-254ad752937d-scripts\") pod \"ovn-northd-0\" (UID: \"6e6a9dbb-63ef-4cf2-b725-254ad752937d\") " pod="openstack/ovn-northd-0"
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.002386 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6e6a9dbb-63ef-4cf2-b725-254ad752937d-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"6e6a9dbb-63ef-4cf2-b725-254ad752937d\") " pod="openstack/ovn-northd-0"
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.002506 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e6a9dbb-63ef-4cf2-b725-254ad752937d-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"6e6a9dbb-63ef-4cf2-b725-254ad752937d\") " pod="openstack/ovn-northd-0"
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.002563 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bkpf\" (UniqueName: \"kubernetes.io/projected/6e6a9dbb-63ef-4cf2-b725-254ad752937d-kube-api-access-9bkpf\") pod \"ovn-northd-0\" (UID: \"6e6a9dbb-63ef-4cf2-b725-254ad752937d\") " pod="openstack/ovn-northd-0"
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.003864 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0"
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.015888 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"2f841e0c-b40f-4dd1-8427-ea07840bcdf6","Type":"ContainerStarted","Data":"871ae1cb196cf07697f15232456a4f8625dc20f3958214632f5cf3ddbf34d14a"}
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.021754 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"5b42feb4-a718-4036-be9e-3113b97680c4","Type":"ContainerStarted","Data":"716e3670226e5acea14bcfef4f5e0b6ca381026181e3abbf6df938b510730f6a"}
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.101670 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=22.553775373 podStartE2EDuration="39.101640909s" podCreationTimestamp="2026-02-02 11:10:37 +0000 UTC" firstStartedPulling="2026-02-02 11:10:50.415283899 +0000 UTC m=+1044.752384917" lastFinishedPulling="2026-02-02 11:11:06.963149425 +0000 UTC m=+1061.300250453" observedRunningTime="2026-02-02 11:11:16.089420517 +0000 UTC m=+1070.426521555" watchObservedRunningTime="2026-02-02 11:11:16.101640909 +0000 UTC m=+1070.438741957"
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.104278 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e6a9dbb-63ef-4cf2-b725-254ad752937d-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"6e6a9dbb-63ef-4cf2-b725-254ad752937d\") " pod="openstack/ovn-northd-0"
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.104415 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e6a9dbb-63ef-4cf2-b725-254ad752937d-config\") pod \"ovn-northd-0\" (UID: \"6e6a9dbb-63ef-4cf2-b725-254ad752937d\") " pod="openstack/ovn-northd-0"
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.104453 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e6a9dbb-63ef-4cf2-b725-254ad752937d-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"6e6a9dbb-63ef-4cf2-b725-254ad752937d\") " pod="openstack/ovn-northd-0"
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.104474 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e6a9dbb-63ef-4cf2-b725-254ad752937d-scripts\") pod \"ovn-northd-0\" (UID: \"6e6a9dbb-63ef-4cf2-b725-254ad752937d\") " pod="openstack/ovn-northd-0"
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.104513 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6e6a9dbb-63ef-4cf2-b725-254ad752937d-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"6e6a9dbb-63ef-4cf2-b725-254ad752937d\") " pod="openstack/ovn-northd-0"
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.104562 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e6a9dbb-63ef-4cf2-b725-254ad752937d-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"6e6a9dbb-63ef-4cf2-b725-254ad752937d\") " pod="openstack/ovn-northd-0"
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.104584 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bkpf\" (UniqueName: \"kubernetes.io/projected/6e6a9dbb-63ef-4cf2-b725-254ad752937d-kube-api-access-9bkpf\") pod \"ovn-northd-0\" (UID: \"6e6a9dbb-63ef-4cf2-b725-254ad752937d\") " pod="openstack/ovn-northd-0"
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.105251 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6e6a9dbb-63ef-4cf2-b725-254ad752937d-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"6e6a9dbb-63ef-4cf2-b725-254ad752937d\") " pod="openstack/ovn-northd-0"
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.105483 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e6a9dbb-63ef-4cf2-b725-254ad752937d-config\") pod \"ovn-northd-0\" (UID: \"6e6a9dbb-63ef-4cf2-b725-254ad752937d\") " pod="openstack/ovn-northd-0"
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.105484 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e6a9dbb-63ef-4cf2-b725-254ad752937d-scripts\") pod \"ovn-northd-0\" (UID: \"6e6a9dbb-63ef-4cf2-b725-254ad752937d\") " pod="openstack/ovn-northd-0"
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.111021 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e6a9dbb-63ef-4cf2-b725-254ad752937d-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"6e6a9dbb-63ef-4cf2-b725-254ad752937d\") " pod="openstack/ovn-northd-0"
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.112207 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e6a9dbb-63ef-4cf2-b725-254ad752937d-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"6e6a9dbb-63ef-4cf2-b725-254ad752937d\") " pod="openstack/ovn-northd-0"
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.112272 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e6a9dbb-63ef-4cf2-b725-254ad752937d-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"6e6a9dbb-63ef-4cf2-b725-254ad752937d\") " pod="openstack/ovn-northd-0"
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.191724 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bkpf\" (UniqueName: \"kubernetes.io/projected/6e6a9dbb-63ef-4cf2-b725-254ad752937d-kube-api-access-9bkpf\") pod \"ovn-northd-0\" (UID: \"6e6a9dbb-63ef-4cf2-b725-254ad752937d\") " pod="openstack/ovn-northd-0"
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.220542 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=21.086628734 podStartE2EDuration="37.220521184s" podCreationTimestamp="2026-02-02 11:10:39 +0000 UTC" firstStartedPulling="2026-02-02 11:10:50.828280479 +0000 UTC m=+1045.165381507" lastFinishedPulling="2026-02-02 11:11:06.962172929 +0000 UTC m=+1061.299273957" observedRunningTime="2026-02-02 11:11:16.219926748 +0000 UTC m=+1070.557027776" watchObservedRunningTime="2026-02-02 11:11:16.220521184 +0000 UTC m=+1070.557622222"
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.458286 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Feb 02 11:11:16 crc kubenswrapper[4838]: I0202 11:11:16.934662 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Feb 02 11:11:16 crc kubenswrapper[4838]: W0202 11:11:16.939775 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6e6a9dbb_63ef_4cf2_b725_254ad752937d.slice/crio-857c69b98b4d46ecdbaf8c214e78732bfe0705e94df805d3c624526ee7432da3 WatchSource:0}: Error finding container 857c69b98b4d46ecdbaf8c214e78732bfe0705e94df805d3c624526ee7432da3: Status 404 returned error can't find the container with id 857c69b98b4d46ecdbaf8c214e78732bfe0705e94df805d3c624526ee7432da3
Feb 02 11:11:17 crc kubenswrapper[4838]: I0202 11:11:17.030175 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"6e6a9dbb-63ef-4cf2-b725-254ad752937d","Type":"ContainerStarted","Data":"857c69b98b4d46ecdbaf8c214e78732bfe0705e94df805d3c624526ee7432da3"}
Feb 02 11:11:19 crc kubenswrapper[4838]: I0202 11:11:19.049797 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0"
Feb 02 11:11:19 crc kubenswrapper[4838]: I0202 11:11:19.050194 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0"
Feb 02 11:11:19 crc kubenswrapper[4838]: I0202 11:11:19.687655 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86db49b7ff-zhs6w"
Feb 02 11:11:19 crc kubenswrapper[4838]: I0202 11:11:19.757456 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-mtkxv"]
Feb 02 11:11:19 crc kubenswrapper[4838]: I0202 11:11:19.757744 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7fd796d7df-mtkxv" podUID="e8ce7915-7907-4d58-a404-fb3633c7d792" containerName="dnsmasq-dns" containerID="cri-o://993b93ff19731716cd1684da8b490e9f73bb6066da04476a8e7010103af6b603" gracePeriod=10
Feb 02 11:11:20 crc kubenswrapper[4838]: I0202 11:11:20.502377 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0"
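The DELETE above leads to "Killing container with a grace period" with gracePeriod=10: the runtime delivers SIGTERM and escalates to SIGKILL only if the container outlives the grace period, and the dnsmasq-dns container below does exit cleanly (exitCode=0) within the window. A local-process analogy of the same sequence (the real kill travels through the CRI to cri-o, not to a local PID):

    package main

    import (
        "fmt"
        "os/exec"
        "syscall"
        "time"
    )

    // Send SIGTERM, wait up to the grace period for a clean exit, then
    // fall back to SIGKILL.
    func killWithGrace(cmd *exec.Cmd, grace time.Duration) error {
        if err := cmd.Process.Signal(syscall.SIGTERM); err != nil {
            return err
        }
        done := make(chan error, 1)
        go func() { done <- cmd.Wait() }()
        select {
        case err := <-done:
            return err // exited within the grace period
        case <-time.After(grace):
            fmt.Println("grace period expired, sending SIGKILL")
            return cmd.Process.Kill()
        }
    }

    func main() {
        cmd := exec.Command("sleep", "60")
        if err := cmd.Start(); err != nil {
            panic(err)
        }
        fmt.Println(killWithGrace(cmd, 10*time.Second)) // gracePeriod=10, as in the log
    }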
Feb 02 11:11:20 crc kubenswrapper[4838]: I0202 11:11:20.502415 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0"
Feb 02 11:11:21 crc kubenswrapper[4838]: I0202 11:11:21.065237 4838 generic.go:334] "Generic (PLEG): container finished" podID="e8ce7915-7907-4d58-a404-fb3633c7d792" containerID="993b93ff19731716cd1684da8b490e9f73bb6066da04476a8e7010103af6b603" exitCode=0
Feb 02 11:11:21 crc kubenswrapper[4838]: I0202 11:11:21.065280 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-mtkxv" event={"ID":"e8ce7915-7907-4d58-a404-fb3633c7d792","Type":"ContainerDied","Data":"993b93ff19731716cd1684da8b490e9f73bb6066da04476a8e7010103af6b603"}
Feb 02 11:11:22 crc kubenswrapper[4838]: I0202 11:11:22.932843 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-v77l6"]
Feb 02 11:11:22 crc kubenswrapper[4838]: I0202 11:11:22.941945 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-v77l6"
Feb 02 11:11:22 crc kubenswrapper[4838]: I0202 11:11:22.951131 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-v77l6"]
Feb 02 11:11:23 crc kubenswrapper[4838]: I0202 11:11:23.128505 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/646dcf2a-3e0a-4470-8950-40543c320e6f-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-v77l6\" (UID: \"646dcf2a-3e0a-4470-8950-40543c320e6f\") " pod="openstack/dnsmasq-dns-698758b865-v77l6"
Feb 02 11:11:23 crc kubenswrapper[4838]: I0202 11:11:23.128584 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/646dcf2a-3e0a-4470-8950-40543c320e6f-dns-svc\") pod \"dnsmasq-dns-698758b865-v77l6\" (UID: \"646dcf2a-3e0a-4470-8950-40543c320e6f\") " pod="openstack/dnsmasq-dns-698758b865-v77l6"
Feb 02 11:11:23 crc kubenswrapper[4838]: I0202 11:11:23.128661 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/646dcf2a-3e0a-4470-8950-40543c320e6f-config\") pod \"dnsmasq-dns-698758b865-v77l6\" (UID: \"646dcf2a-3e0a-4470-8950-40543c320e6f\") " pod="openstack/dnsmasq-dns-698758b865-v77l6"
Feb 02 11:11:23 crc kubenswrapper[4838]: I0202 11:11:23.128697 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/646dcf2a-3e0a-4470-8950-40543c320e6f-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-v77l6\" (UID: \"646dcf2a-3e0a-4470-8950-40543c320e6f\") " pod="openstack/dnsmasq-dns-698758b865-v77l6"
Feb 02 11:11:23 crc kubenswrapper[4838]: I0202 11:11:23.128771 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7d8q\" (UniqueName: \"kubernetes.io/projected/646dcf2a-3e0a-4470-8950-40543c320e6f-kube-api-access-d7d8q\") pod \"dnsmasq-dns-698758b865-v77l6\" (UID: \"646dcf2a-3e0a-4470-8950-40543c320e6f\") " pod="openstack/dnsmasq-dns-698758b865-v77l6"
Feb 02 11:11:23 crc kubenswrapper[4838]: I0202 11:11:23.230723 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7d8q\" (UniqueName: \"kubernetes.io/projected/646dcf2a-3e0a-4470-8950-40543c320e6f-kube-api-access-d7d8q\") pod \"dnsmasq-dns-698758b865-v77l6\" (UID: \"646dcf2a-3e0a-4470-8950-40543c320e6f\") " pod="openstack/dnsmasq-dns-698758b865-v77l6"
Feb 02 11:11:23 crc kubenswrapper[4838]: I0202 11:11:23.230854 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/646dcf2a-3e0a-4470-8950-40543c320e6f-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-v77l6\" (UID: \"646dcf2a-3e0a-4470-8950-40543c320e6f\") " pod="openstack/dnsmasq-dns-698758b865-v77l6"
Feb 02 11:11:23 crc kubenswrapper[4838]: I0202 11:11:23.230907 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/646dcf2a-3e0a-4470-8950-40543c320e6f-dns-svc\") pod \"dnsmasq-dns-698758b865-v77l6\" (UID: \"646dcf2a-3e0a-4470-8950-40543c320e6f\") " pod="openstack/dnsmasq-dns-698758b865-v77l6"
Feb 02 11:11:23 crc kubenswrapper[4838]: I0202 11:11:23.230932 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/646dcf2a-3e0a-4470-8950-40543c320e6f-config\") pod \"dnsmasq-dns-698758b865-v77l6\" (UID: \"646dcf2a-3e0a-4470-8950-40543c320e6f\") " pod="openstack/dnsmasq-dns-698758b865-v77l6"
Feb 02 11:11:23 crc kubenswrapper[4838]: I0202 11:11:23.232093 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/646dcf2a-3e0a-4470-8950-40543c320e6f-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-v77l6\" (UID: \"646dcf2a-3e0a-4470-8950-40543c320e6f\") " pod="openstack/dnsmasq-dns-698758b865-v77l6"
Feb 02 11:11:23 crc kubenswrapper[4838]: I0202 11:11:23.232111 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/646dcf2a-3e0a-4470-8950-40543c320e6f-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-v77l6\" (UID: \"646dcf2a-3e0a-4470-8950-40543c320e6f\") " pod="openstack/dnsmasq-dns-698758b865-v77l6"
Feb 02 11:11:23 crc kubenswrapper[4838]: I0202 11:11:23.232335 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/646dcf2a-3e0a-4470-8950-40543c320e6f-config\") pod \"dnsmasq-dns-698758b865-v77l6\" (UID: \"646dcf2a-3e0a-4470-8950-40543c320e6f\") " pod="openstack/dnsmasq-dns-698758b865-v77l6"
Feb 02 11:11:23 crc kubenswrapper[4838]: I0202 11:11:23.230975 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/646dcf2a-3e0a-4470-8950-40543c320e6f-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-v77l6\" (UID: \"646dcf2a-3e0a-4470-8950-40543c320e6f\") " pod="openstack/dnsmasq-dns-698758b865-v77l6"
Feb 02 11:11:23 crc kubenswrapper[4838]: I0202 11:11:23.232732 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/646dcf2a-3e0a-4470-8950-40543c320e6f-dns-svc\") pod \"dnsmasq-dns-698758b865-v77l6\" (UID: \"646dcf2a-3e0a-4470-8950-40543c320e6f\") " pod="openstack/dnsmasq-dns-698758b865-v77l6"
Feb 02 11:11:23 crc kubenswrapper[4838]: I0202 11:11:23.262609 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7d8q\" (UniqueName: \"kubernetes.io/projected/646dcf2a-3e0a-4470-8950-40543c320e6f-kube-api-access-d7d8q\") pod \"dnsmasq-dns-698758b865-v77l6\" (UID: \"646dcf2a-3e0a-4470-8950-40543c320e6f\") " pod="openstack/dnsmasq-dns-698758b865-v77l6"
Feb 02 11:11:23 crc kubenswrapper[4838]: I0202 11:11:23.562196 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-v77l6"
Feb 02 11:11:24 crc kubenswrapper[4838]: I0202 11:11:24.062403 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"]
Feb 02 11:11:24 crc kubenswrapper[4838]: I0202 11:11:24.072941 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Feb 02 11:11:24 crc kubenswrapper[4838]: I0202 11:11:24.076398 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf"
Feb 02 11:11:24 crc kubenswrapper[4838]: I0202 11:11:24.076701 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-br5ls"
Feb 02 11:11:24 crc kubenswrapper[4838]: I0202 11:11:24.077131 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data"
Feb 02 11:11:24 crc kubenswrapper[4838]: I0202 11:11:24.077431 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files"
Feb 02 11:11:24 crc kubenswrapper[4838]: I0202 11:11:24.106873 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Feb 02 11:11:24 crc kubenswrapper[4838]: I0202 11:11:24.248533 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"12e53950-9847-46b2-a51a-1fac5b690098\") " pod="openstack/swift-storage-0"
Feb 02 11:11:24 crc kubenswrapper[4838]: I0202 11:11:24.248577 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/12e53950-9847-46b2-a51a-1fac5b690098-etc-swift\") pod \"swift-storage-0\" (UID: \"12e53950-9847-46b2-a51a-1fac5b690098\") " pod="openstack/swift-storage-0"
Feb 02 11:11:24 crc kubenswrapper[4838]: I0202 11:11:24.248618 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/12e53950-9847-46b2-a51a-1fac5b690098-cache\") pod \"swift-storage-0\" (UID: \"12e53950-9847-46b2-a51a-1fac5b690098\") " pod="openstack/swift-storage-0"
Feb 02 11:11:24 crc kubenswrapper[4838]: I0202 11:11:24.248702 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/12e53950-9847-46b2-a51a-1fac5b690098-lock\") pod \"swift-storage-0\" (UID: \"12e53950-9847-46b2-a51a-1fac5b690098\") " pod="openstack/swift-storage-0"
Feb 02 11:11:24 crc kubenswrapper[4838]: I0202 11:11:24.248746 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12e53950-9847-46b2-a51a-1fac5b690098-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"12e53950-9847-46b2-a51a-1fac5b690098\") " pod="openstack/swift-storage-0"
Feb 02 11:11:24 crc kubenswrapper[4838]: I0202 11:11:24.248791 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bgff\" (UniqueName: \"kubernetes.io/projected/12e53950-9847-46b2-a51a-1fac5b690098-kube-api-access-4bgff\") pod \"swift-storage-0\" (UID: \"12e53950-9847-46b2-a51a-1fac5b690098\") " pod="openstack/swift-storage-0"
\"kubernetes.io/projected/12e53950-9847-46b2-a51a-1fac5b690098-kube-api-access-4bgff\") pod \"swift-storage-0\" (UID: \"12e53950-9847-46b2-a51a-1fac5b690098\") " pod="openstack/swift-storage-0" Feb 02 11:11:24 crc kubenswrapper[4838]: I0202 11:11:24.350336 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12e53950-9847-46b2-a51a-1fac5b690098-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"12e53950-9847-46b2-a51a-1fac5b690098\") " pod="openstack/swift-storage-0" Feb 02 11:11:24 crc kubenswrapper[4838]: I0202 11:11:24.350404 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bgff\" (UniqueName: \"kubernetes.io/projected/12e53950-9847-46b2-a51a-1fac5b690098-kube-api-access-4bgff\") pod \"swift-storage-0\" (UID: \"12e53950-9847-46b2-a51a-1fac5b690098\") " pod="openstack/swift-storage-0" Feb 02 11:11:24 crc kubenswrapper[4838]: I0202 11:11:24.350439 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"12e53950-9847-46b2-a51a-1fac5b690098\") " pod="openstack/swift-storage-0" Feb 02 11:11:24 crc kubenswrapper[4838]: I0202 11:11:24.350456 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/12e53950-9847-46b2-a51a-1fac5b690098-etc-swift\") pod \"swift-storage-0\" (UID: \"12e53950-9847-46b2-a51a-1fac5b690098\") " pod="openstack/swift-storage-0" Feb 02 11:11:24 crc kubenswrapper[4838]: I0202 11:11:24.350489 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/12e53950-9847-46b2-a51a-1fac5b690098-cache\") pod \"swift-storage-0\" (UID: \"12e53950-9847-46b2-a51a-1fac5b690098\") " pod="openstack/swift-storage-0" Feb 02 11:11:24 crc kubenswrapper[4838]: I0202 11:11:24.350521 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/12e53950-9847-46b2-a51a-1fac5b690098-lock\") pod \"swift-storage-0\" (UID: \"12e53950-9847-46b2-a51a-1fac5b690098\") " pod="openstack/swift-storage-0" Feb 02 11:11:24 crc kubenswrapper[4838]: E0202 11:11:24.350610 4838 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 02 11:11:24 crc kubenswrapper[4838]: E0202 11:11:24.350629 4838 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 02 11:11:24 crc kubenswrapper[4838]: E0202 11:11:24.350685 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/12e53950-9847-46b2-a51a-1fac5b690098-etc-swift podName:12e53950-9847-46b2-a51a-1fac5b690098 nodeName:}" failed. No retries permitted until 2026-02-02 11:11:24.8506707 +0000 UTC m=+1079.187771718 (durationBeforeRetry 500ms). 
Feb 02 11:11:24 crc kubenswrapper[4838]: I0202 11:11:24.350923 4838 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"12e53950-9847-46b2-a51a-1fac5b690098\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/swift-storage-0" Feb 02 11:11:24 crc kubenswrapper[4838]: I0202 11:11:24.350973 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/12e53950-9847-46b2-a51a-1fac5b690098-lock\") pod \"swift-storage-0\" (UID: \"12e53950-9847-46b2-a51a-1fac5b690098\") " pod="openstack/swift-storage-0" Feb 02 11:11:24 crc kubenswrapper[4838]: I0202 11:11:24.351044 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/12e53950-9847-46b2-a51a-1fac5b690098-cache\") pod \"swift-storage-0\" (UID: \"12e53950-9847-46b2-a51a-1fac5b690098\") " pod="openstack/swift-storage-0" Feb 02 11:11:24 crc kubenswrapper[4838]: I0202 11:11:24.355064 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12e53950-9847-46b2-a51a-1fac5b690098-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"12e53950-9847-46b2-a51a-1fac5b690098\") " pod="openstack/swift-storage-0" Feb 02 11:11:24 crc kubenswrapper[4838]: I0202 11:11:24.368260 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bgff\" (UniqueName: \"kubernetes.io/projected/12e53950-9847-46b2-a51a-1fac5b690098-kube-api-access-4bgff\") pod \"swift-storage-0\" (UID: \"12e53950-9847-46b2-a51a-1fac5b690098\") " pod="openstack/swift-storage-0" Feb 02 11:11:24 crc kubenswrapper[4838]: I0202 11:11:24.373746 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"12e53950-9847-46b2-a51a-1fac5b690098\") " pod="openstack/swift-storage-0" Feb 02 11:11:24 crc kubenswrapper[4838]: I0202 11:11:24.859516 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/12e53950-9847-46b2-a51a-1fac5b690098-etc-swift\") pod \"swift-storage-0\" (UID: \"12e53950-9847-46b2-a51a-1fac5b690098\") " pod="openstack/swift-storage-0" Feb 02 11:11:24 crc kubenswrapper[4838]: E0202 11:11:24.859758 4838 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 02 11:11:24 crc kubenswrapper[4838]: E0202 11:11:24.860111 4838 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 02 11:11:24 crc kubenswrapper[4838]: E0202 11:11:24.860181 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/12e53950-9847-46b2-a51a-1fac5b690098-etc-swift podName:12e53950-9847-46b2-a51a-1fac5b690098 nodeName:}" failed. No retries permitted until 2026-02-02 11:11:25.860160395 +0000 UTC m=+1080.197261423 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/12e53950-9847-46b2-a51a-1fac5b690098-etc-swift") pod "swift-storage-0" (UID: "12e53950-9847-46b2-a51a-1fac5b690098") : configmap "swift-ring-files" not found
Feb 02 11:11:25 crc kubenswrapper[4838]: I0202 11:11:25.878086 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/12e53950-9847-46b2-a51a-1fac5b690098-etc-swift\") pod \"swift-storage-0\" (UID: \"12e53950-9847-46b2-a51a-1fac5b690098\") " pod="openstack/swift-storage-0" Feb 02 11:11:25 crc kubenswrapper[4838]: E0202 11:11:25.878308 4838 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 02 11:11:25 crc kubenswrapper[4838]: E0202 11:11:25.878340 4838 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 02 11:11:25 crc kubenswrapper[4838]: E0202 11:11:25.878428 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/12e53950-9847-46b2-a51a-1fac5b690098-etc-swift podName:12e53950-9847-46b2-a51a-1fac5b690098 nodeName:}" failed. No retries permitted until 2026-02-02 11:11:27.878398573 +0000 UTC m=+1082.215499641 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/12e53950-9847-46b2-a51a-1fac5b690098-etc-swift") pod "swift-storage-0" (UID: "12e53950-9847-46b2-a51a-1fac5b690098") : configmap "swift-ring-files" not found Feb 02 11:11:27 crc kubenswrapper[4838]: I0202 11:11:27.913738 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/12e53950-9847-46b2-a51a-1fac5b690098-etc-swift\") pod \"swift-storage-0\" (UID: \"12e53950-9847-46b2-a51a-1fac5b690098\") " pod="openstack/swift-storage-0" Feb 02 11:11:27 crc kubenswrapper[4838]: E0202 11:11:27.913891 4838 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 02 11:11:27 crc kubenswrapper[4838]: E0202 11:11:27.914029 4838 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 02 11:11:27 crc kubenswrapper[4838]: E0202 11:11:27.914076 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/12e53950-9847-46b2-a51a-1fac5b690098-etc-swift podName:12e53950-9847-46b2-a51a-1fac5b690098 nodeName:}" failed. No retries permitted until 2026-02-02 11:11:31.91406204 +0000 UTC m=+1086.251163068 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/12e53950-9847-46b2-a51a-1fac5b690098-etc-swift") pod "swift-storage-0" (UID: "12e53950-9847-46b2-a51a-1fac5b690098") : configmap "swift-ring-files" not found Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.057896 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-524zj"] Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.059066 4838 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/swift-ring-rebalance-524zj" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.060924 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.061286 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.061471 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.073166 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-524zj"] Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.219172 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/582a43e1-d21a-4421-ae28-0eecd147d19e-ring-data-devices\") pod \"swift-ring-rebalance-524zj\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " pod="openstack/swift-ring-rebalance-524zj" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.219509 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/582a43e1-d21a-4421-ae28-0eecd147d19e-dispersionconf\") pod \"swift-ring-rebalance-524zj\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " pod="openstack/swift-ring-rebalance-524zj" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.219551 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/582a43e1-d21a-4421-ae28-0eecd147d19e-combined-ca-bundle\") pod \"swift-ring-rebalance-524zj\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " pod="openstack/swift-ring-rebalance-524zj" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.219649 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/582a43e1-d21a-4421-ae28-0eecd147d19e-scripts\") pod \"swift-ring-rebalance-524zj\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " pod="openstack/swift-ring-rebalance-524zj" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.219673 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82t2v\" (UniqueName: \"kubernetes.io/projected/582a43e1-d21a-4421-ae28-0eecd147d19e-kube-api-access-82t2v\") pod \"swift-ring-rebalance-524zj\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " pod="openstack/swift-ring-rebalance-524zj" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.219751 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/582a43e1-d21a-4421-ae28-0eecd147d19e-etc-swift\") pod \"swift-ring-rebalance-524zj\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " pod="openstack/swift-ring-rebalance-524zj" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.219792 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/582a43e1-d21a-4421-ae28-0eecd147d19e-swiftconf\") pod \"swift-ring-rebalance-524zj\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " pod="openstack/swift-ring-rebalance-524zj" Feb 02 
11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.321147 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/582a43e1-d21a-4421-ae28-0eecd147d19e-scripts\") pod \"swift-ring-rebalance-524zj\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " pod="openstack/swift-ring-rebalance-524zj" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.321187 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82t2v\" (UniqueName: \"kubernetes.io/projected/582a43e1-d21a-4421-ae28-0eecd147d19e-kube-api-access-82t2v\") pod \"swift-ring-rebalance-524zj\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " pod="openstack/swift-ring-rebalance-524zj" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.321229 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/582a43e1-d21a-4421-ae28-0eecd147d19e-etc-swift\") pod \"swift-ring-rebalance-524zj\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " pod="openstack/swift-ring-rebalance-524zj" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.321253 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/582a43e1-d21a-4421-ae28-0eecd147d19e-swiftconf\") pod \"swift-ring-rebalance-524zj\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " pod="openstack/swift-ring-rebalance-524zj" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.321316 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/582a43e1-d21a-4421-ae28-0eecd147d19e-ring-data-devices\") pod \"swift-ring-rebalance-524zj\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " pod="openstack/swift-ring-rebalance-524zj" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.321344 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/582a43e1-d21a-4421-ae28-0eecd147d19e-dispersionconf\") pod \"swift-ring-rebalance-524zj\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " pod="openstack/swift-ring-rebalance-524zj" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.321364 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/582a43e1-d21a-4421-ae28-0eecd147d19e-combined-ca-bundle\") pod \"swift-ring-rebalance-524zj\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " pod="openstack/swift-ring-rebalance-524zj" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.321929 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/582a43e1-d21a-4421-ae28-0eecd147d19e-etc-swift\") pod \"swift-ring-rebalance-524zj\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " pod="openstack/swift-ring-rebalance-524zj" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.322271 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/582a43e1-d21a-4421-ae28-0eecd147d19e-ring-data-devices\") pod \"swift-ring-rebalance-524zj\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " pod="openstack/swift-ring-rebalance-524zj" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.322574 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/582a43e1-d21a-4421-ae28-0eecd147d19e-scripts\") pod \"swift-ring-rebalance-524zj\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " pod="openstack/swift-ring-rebalance-524zj" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.330081 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/582a43e1-d21a-4421-ae28-0eecd147d19e-dispersionconf\") pod \"swift-ring-rebalance-524zj\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " pod="openstack/swift-ring-rebalance-524zj" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.330381 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/582a43e1-d21a-4421-ae28-0eecd147d19e-swiftconf\") pod \"swift-ring-rebalance-524zj\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " pod="openstack/swift-ring-rebalance-524zj" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.334623 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/582a43e1-d21a-4421-ae28-0eecd147d19e-combined-ca-bundle\") pod \"swift-ring-rebalance-524zj\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " pod="openstack/swift-ring-rebalance-524zj" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.338358 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82t2v\" (UniqueName: \"kubernetes.io/projected/582a43e1-d21a-4421-ae28-0eecd147d19e-kube-api-access-82t2v\") pod \"swift-ring-rebalance-524zj\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " pod="openstack/swift-ring-rebalance-524zj" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.383046 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-br5ls" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.391289 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-524zj" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.714630 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.729392 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-mtkxv" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.817205 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.833802 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e8ce7915-7907-4d58-a404-fb3633c7d792-ovsdbserver-nb\") pod \"e8ce7915-7907-4d58-a404-fb3633c7d792\" (UID: \"e8ce7915-7907-4d58-a404-fb3633c7d792\") " Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.833898 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8ce7915-7907-4d58-a404-fb3633c7d792-dns-svc\") pod \"e8ce7915-7907-4d58-a404-fb3633c7d792\" (UID: \"e8ce7915-7907-4d58-a404-fb3633c7d792\") " Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.833990 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8ce7915-7907-4d58-a404-fb3633c7d792-config\") pod \"e8ce7915-7907-4d58-a404-fb3633c7d792\" (UID: \"e8ce7915-7907-4d58-a404-fb3633c7d792\") " Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.834066 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zzw45\" (UniqueName: \"kubernetes.io/projected/e8ce7915-7907-4d58-a404-fb3633c7d792-kube-api-access-zzw45\") pod \"e8ce7915-7907-4d58-a404-fb3633c7d792\" (UID: \"e8ce7915-7907-4d58-a404-fb3633c7d792\") " Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.844868 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8ce7915-7907-4d58-a404-fb3633c7d792-kube-api-access-zzw45" (OuterVolumeSpecName: "kube-api-access-zzw45") pod "e8ce7915-7907-4d58-a404-fb3633c7d792" (UID: "e8ce7915-7907-4d58-a404-fb3633c7d792"). InnerVolumeSpecName "kube-api-access-zzw45". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.885839 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8ce7915-7907-4d58-a404-fb3633c7d792-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e8ce7915-7907-4d58-a404-fb3633c7d792" (UID: "e8ce7915-7907-4d58-a404-fb3633c7d792"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.893071 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8ce7915-7907-4d58-a404-fb3633c7d792-config" (OuterVolumeSpecName: "config") pod "e8ce7915-7907-4d58-a404-fb3633c7d792" (UID: "e8ce7915-7907-4d58-a404-fb3633c7d792"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.909281 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8ce7915-7907-4d58-a404-fb3633c7d792-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e8ce7915-7907-4d58-a404-fb3633c7d792" (UID: "e8ce7915-7907-4d58-a404-fb3633c7d792"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.936503 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zzw45\" (UniqueName: \"kubernetes.io/projected/e8ce7915-7907-4d58-a404-fb3633c7d792-kube-api-access-zzw45\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.936546 4838 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e8ce7915-7907-4d58-a404-fb3633c7d792-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.936558 4838 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8ce7915-7907-4d58-a404-fb3633c7d792-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:28 crc kubenswrapper[4838]: I0202 11:11:28.936570 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8ce7915-7907-4d58-a404-fb3633c7d792-config\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:29 crc kubenswrapper[4838]: W0202 11:11:29.066992 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod582a43e1_d21a_4421_ae28_0eecd147d19e.slice/crio-eff77afcd85fc5d4cf2546f6f9347f95d9a2d51deba24bf0fc59c0b43bb1e7dd WatchSource:0}: Error finding container eff77afcd85fc5d4cf2546f6f9347f95d9a2d51deba24bf0fc59c0b43bb1e7dd: Status 404 returned error can't find the container with id eff77afcd85fc5d4cf2546f6f9347f95d9a2d51deba24bf0fc59c0b43bb1e7dd Feb 02 11:11:29 crc kubenswrapper[4838]: I0202 11:11:29.077093 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-524zj"] Feb 02 11:11:29 crc kubenswrapper[4838]: I0202 11:11:29.141963 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"039ec177-4229-482e-aeec-ec3db4349951","Type":"ContainerStarted","Data":"36042f2a57238fe93af81bcf7422de7a6317bd77934346fa9b0ad87a995f4266"} Feb 02 11:11:29 crc kubenswrapper[4838]: I0202 11:11:29.142237 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Feb 02 11:11:29 crc kubenswrapper[4838]: I0202 11:11:29.143318 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-524zj" event={"ID":"582a43e1-d21a-4421-ae28-0eecd147d19e","Type":"ContainerStarted","Data":"eff77afcd85fc5d4cf2546f6f9347f95d9a2d51deba24bf0fc59c0b43bb1e7dd"} Feb 02 11:11:29 crc kubenswrapper[4838]: I0202 11:11:29.143916 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-v77l6"] Feb 02 11:11:29 crc kubenswrapper[4838]: I0202 11:11:29.147031 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-mtkxv" event={"ID":"e8ce7915-7907-4d58-a404-fb3633c7d792","Type":"ContainerDied","Data":"26cf5a03efbed326fe84adbb17bfb34e060a285b0c2c28d79d92f9f03cd3006c"} Feb 02 11:11:29 crc kubenswrapper[4838]: I0202 11:11:29.147073 4838 scope.go:117] "RemoveContainer" containerID="993b93ff19731716cd1684da8b490e9f73bb6066da04476a8e7010103af6b603" Feb 02 11:11:29 crc kubenswrapper[4838]: I0202 11:11:29.147181 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-mtkxv" Feb 02 11:11:29 crc kubenswrapper[4838]: I0202 11:11:29.149861 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"6e6a9dbb-63ef-4cf2-b725-254ad752937d","Type":"ContainerStarted","Data":"c97649a860b9f18f3098020f0ff72b17ae11a1d370c333ffb93d4637bda931a8"} Feb 02 11:11:29 crc kubenswrapper[4838]: W0202 11:11:29.160245 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod646dcf2a_3e0a_4470_8950_40543c320e6f.slice/crio-6b762e13fcb2e58c16d7890afc3afd96be03766f99f4d33c1717e49b55b61616 WatchSource:0}: Error finding container 6b762e13fcb2e58c16d7890afc3afd96be03766f99f4d33c1717e49b55b61616: Status 404 returned error can't find the container with id 6b762e13fcb2e58c16d7890afc3afd96be03766f99f4d33c1717e49b55b61616 Feb 02 11:11:29 crc kubenswrapper[4838]: I0202 11:11:29.161303 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=9.149722454 podStartE2EDuration="47.161282236s" podCreationTimestamp="2026-02-02 11:10:42 +0000 UTC" firstStartedPulling="2026-02-02 11:10:50.861721851 +0000 UTC m=+1045.198822879" lastFinishedPulling="2026-02-02 11:11:28.873281633 +0000 UTC m=+1083.210382661" observedRunningTime="2026-02-02 11:11:29.156557812 +0000 UTC m=+1083.493658840" watchObservedRunningTime="2026-02-02 11:11:29.161282236 +0000 UTC m=+1083.498383264" Feb 02 11:11:29 crc kubenswrapper[4838]: I0202 11:11:29.175556 4838 scope.go:117] "RemoveContainer" containerID="30a6660035cd2b143ecec5b30fa6e1b14f5de333734e58c6058a748f4867225a" Feb 02 11:11:29 crc kubenswrapper[4838]: I0202 11:11:29.189687 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-mtkxv"] Feb 02 11:11:29 crc kubenswrapper[4838]: I0202 11:11:29.196569 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-mtkxv"] Feb 02 11:11:29 crc kubenswrapper[4838]: I0202 11:11:29.476700 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7fd796d7df-mtkxv" podUID="e8ce7915-7907-4d58-a404-fb3633c7d792" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.109:5353: i/o timeout" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.162699 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"6e6a9dbb-63ef-4cf2-b725-254ad752937d","Type":"ContainerStarted","Data":"5c41114695b7f77b4a5c0355cf3c6e35743af09b62b60f41ad841586e2c63301"} Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.163557 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.166564 4838 generic.go:334] "Generic (PLEG): container finished" podID="646dcf2a-3e0a-4470-8950-40543c320e6f" containerID="38aedb83a626d18618986a5afeb292a4349bd1b05d30b3255777b1ecb084f65d" exitCode=0 Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.166650 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-v77l6" event={"ID":"646dcf2a-3e0a-4470-8950-40543c320e6f","Type":"ContainerDied","Data":"38aedb83a626d18618986a5afeb292a4349bd1b05d30b3255777b1ecb084f65d"} Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.166677 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-v77l6" event={"ID":"646dcf2a-3e0a-4470-8950-40543c320e6f","Type":"ContainerStarted","Data":"6b762e13fcb2e58c16d7890afc3afd96be03766f99f4d33c1717e49b55b61616"}
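
The readiness probe failure above ("dial tcp 10.217.0.109:5353: i/o timeout") is what a tcpSocket probe reduces to: kubelet attempts a plain TCP connect to the pod IP and port within the probe timeout, and any dial error marks the probe as failed. A stdlib-only sketch of that check (the address is the one from the log; the one-second timeout is an assumption):

package main

import (
	"fmt"
	"net"
	"time"
)

// tcpProbe mimics the core of a TCPSocket readiness probe: a connect
// attempt with a deadline. Success is simply being able to open and
// close the connection.
func tcpProbe(addr string, timeout time.Duration) error {
	conn, err := net.DialTimeout("tcp", addr, timeout)
	if err != nil {
		return err // e.g. "dial tcp 10.217.0.109:5353: i/o timeout"
	}
	return conn.Close()
}

func main() {
	if err := tcpProbe("10.217.0.109:5353", time.Second); err != nil {
		fmt.Println("probe failed:", err)
		return
	}
	fmt.Println("probe ok")
}
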
event={"ID":"646dcf2a-3e0a-4470-8950-40543c320e6f","Type":"ContainerStarted","Data":"6b762e13fcb2e58c16d7890afc3afd96be03766f99f4d33c1717e49b55b61616"} Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.183820 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=3.254701771 podStartE2EDuration="15.183781318s" podCreationTimestamp="2026-02-02 11:11:15 +0000 UTC" firstStartedPulling="2026-02-02 11:11:16.942758198 +0000 UTC m=+1071.279859226" lastFinishedPulling="2026-02-02 11:11:28.871837745 +0000 UTC m=+1083.208938773" observedRunningTime="2026-02-02 11:11:30.179404513 +0000 UTC m=+1084.516505541" watchObservedRunningTime="2026-02-02 11:11:30.183781318 +0000 UTC m=+1084.520882356" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.527015 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8ce7915-7907-4d58-a404-fb3633c7d792" path="/var/lib/kubelet/pods/e8ce7915-7907-4d58-a404-fb3633c7d792/volumes" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.538223 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-94f8-account-create-update-fcsh8"] Feb 02 11:11:30 crc kubenswrapper[4838]: E0202 11:11:30.538691 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8ce7915-7907-4d58-a404-fb3633c7d792" containerName="init" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.538710 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8ce7915-7907-4d58-a404-fb3633c7d792" containerName="init" Feb 02 11:11:30 crc kubenswrapper[4838]: E0202 11:11:30.538747 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8ce7915-7907-4d58-a404-fb3633c7d792" containerName="dnsmasq-dns" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.538758 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8ce7915-7907-4d58-a404-fb3633c7d792" containerName="dnsmasq-dns" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.539021 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8ce7915-7907-4d58-a404-fb3633c7d792" containerName="dnsmasq-dns" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.539989 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-94f8-account-create-update-fcsh8" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.542535 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.550396 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-94f8-account-create-update-fcsh8"] Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.573657 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-dc9m7"] Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.578928 4838 util.go:30] "No sandbox for pod can be found. 
Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.609969 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-dc9m7"] Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.625888 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.667616 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/10c3ebde-9586-4021-8295-261f4305e897-operator-scripts\") pod \"keystone-94f8-account-create-update-fcsh8\" (UID: \"10c3ebde-9586-4021-8295-261f4305e897\") " pod="openstack/keystone-94f8-account-create-update-fcsh8" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.667724 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4lpc\" (UniqueName: \"kubernetes.io/projected/10c3ebde-9586-4021-8295-261f4305e897-kube-api-access-x4lpc\") pod \"keystone-94f8-account-create-update-fcsh8\" (UID: \"10c3ebde-9586-4021-8295-261f4305e897\") " pod="openstack/keystone-94f8-account-create-update-fcsh8" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.697810 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.769090 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/10c3ebde-9586-4021-8295-261f4305e897-operator-scripts\") pod \"keystone-94f8-account-create-update-fcsh8\" (UID: \"10c3ebde-9586-4021-8295-261f4305e897\") " pod="openstack/keystone-94f8-account-create-update-fcsh8" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.769181 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46q2n\" (UniqueName: \"kubernetes.io/projected/6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc-kube-api-access-46q2n\") pod \"keystone-db-create-dc9m7\" (UID: \"6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc\") " pod="openstack/keystone-db-create-dc9m7" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.769226 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4lpc\" (UniqueName: \"kubernetes.io/projected/10c3ebde-9586-4021-8295-261f4305e897-kube-api-access-x4lpc\") pod \"keystone-94f8-account-create-update-fcsh8\" (UID: \"10c3ebde-9586-4021-8295-261f4305e897\") " pod="openstack/keystone-94f8-account-create-update-fcsh8" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.769273 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc-operator-scripts\") pod \"keystone-db-create-dc9m7\" (UID: \"6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc\") " pod="openstack/keystone-db-create-dc9m7" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.771027 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/10c3ebde-9586-4021-8295-261f4305e897-operator-scripts\") pod \"keystone-94f8-account-create-update-fcsh8\" (UID: \"10c3ebde-9586-4021-8295-261f4305e897\") " pod="openstack/keystone-94f8-account-create-update-fcsh8" Feb 02
11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.793417 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4lpc\" (UniqueName: \"kubernetes.io/projected/10c3ebde-9586-4021-8295-261f4305e897-kube-api-access-x4lpc\") pod \"keystone-94f8-account-create-update-fcsh8\" (UID: \"10c3ebde-9586-4021-8295-261f4305e897\") " pod="openstack/keystone-94f8-account-create-update-fcsh8" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.835483 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-sz8sh"] Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.838054 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-sz8sh" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.852832 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-8c85-account-create-update-zthcx"] Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.854092 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-8c85-account-create-update-zthcx" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.859430 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.861338 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-sz8sh"] Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.872366 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46q2n\" (UniqueName: \"kubernetes.io/projected/6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc-kube-api-access-46q2n\") pod \"keystone-db-create-dc9m7\" (UID: \"6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc\") " pod="openstack/keystone-db-create-dc9m7" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.872457 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc-operator-scripts\") pod \"keystone-db-create-dc9m7\" (UID: \"6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc\") " pod="openstack/keystone-db-create-dc9m7" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.872835 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-94f8-account-create-update-fcsh8" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.873224 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc-operator-scripts\") pod \"keystone-db-create-dc9m7\" (UID: \"6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc\") " pod="openstack/keystone-db-create-dc9m7" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.885671 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-8c85-account-create-update-zthcx"] Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.910365 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46q2n\" (UniqueName: \"kubernetes.io/projected/6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc-kube-api-access-46q2n\") pod \"keystone-db-create-dc9m7\" (UID: \"6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc\") " pod="openstack/keystone-db-create-dc9m7" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.973871 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3eb955c8-5244-437a-976d-4fb19d5177b2-operator-scripts\") pod \"placement-db-create-sz8sh\" (UID: \"3eb955c8-5244-437a-976d-4fb19d5177b2\") " pod="openstack/placement-db-create-sz8sh" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.973958 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fce5584a-406e-424c-af9b-bc1d37d0355c-operator-scripts\") pod \"placement-8c85-account-create-update-zthcx\" (UID: \"fce5584a-406e-424c-af9b-bc1d37d0355c\") " pod="openstack/placement-8c85-account-create-update-zthcx" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.973981 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7c9q\" (UniqueName: \"kubernetes.io/projected/3eb955c8-5244-437a-976d-4fb19d5177b2-kube-api-access-h7c9q\") pod \"placement-db-create-sz8sh\" (UID: \"3eb955c8-5244-437a-976d-4fb19d5177b2\") " pod="openstack/placement-db-create-sz8sh" Feb 02 11:11:30 crc kubenswrapper[4838]: I0202 11:11:30.974027 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cd7kb\" (UniqueName: \"kubernetes.io/projected/fce5584a-406e-424c-af9b-bc1d37d0355c-kube-api-access-cd7kb\") pod \"placement-8c85-account-create-update-zthcx\" (UID: \"fce5584a-406e-424c-af9b-bc1d37d0355c\") " pod="openstack/placement-8c85-account-create-update-zthcx" Feb 02 11:11:31 crc kubenswrapper[4838]: I0202 11:11:31.075188 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fce5584a-406e-424c-af9b-bc1d37d0355c-operator-scripts\") pod \"placement-8c85-account-create-update-zthcx\" (UID: \"fce5584a-406e-424c-af9b-bc1d37d0355c\") " pod="openstack/placement-8c85-account-create-update-zthcx" Feb 02 11:11:31 crc kubenswrapper[4838]: I0202 11:11:31.075244 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7c9q\" (UniqueName: \"kubernetes.io/projected/3eb955c8-5244-437a-976d-4fb19d5177b2-kube-api-access-h7c9q\") pod \"placement-db-create-sz8sh\" (UID: \"3eb955c8-5244-437a-976d-4fb19d5177b2\") " pod="openstack/placement-db-create-sz8sh" 
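
Each pod above moves through the same volume sequence: VerifyControllerAttachedVolume registers the volume in kubelet's desired state, MountVolume starts, and operation_generator reports SetUp succeeded (or, as with etc-swift, fails and is retried later). Schematically this is a desired-state versus actual-state reconciler; the sketch below shows only the shape of that loop, with illustrative names, and is not kubelet's actual implementation:

package main

import "fmt"

// A schematic desired/actual-state reconciler in the spirit of the log
// lines above. Everything here is illustrative, not kubelet code.
type volumeID string

type reconciler struct {
	desired map[volumeID]bool    // volumes pods want mounted
	actual  map[volumeID]bool    // volumes already set up
	mount   func(volumeID) error // one attempt; may fail and retry later
}

func (r *reconciler) reconcile() {
	for v := range r.desired {
		if r.actual[v] {
			continue // already mounted, nothing to do
		}
		fmt.Printf("MountVolume started for volume %q\n", v)
		if err := r.mount(v); err != nil {
			fmt.Printf("MountVolume.SetUp failed for %q: %v\n", v, err)
			continue // retry backoff is handled elsewhere
		}
		r.actual[v] = true
		fmt.Printf("MountVolume.SetUp succeeded for %q\n", v)
	}
}

func main() {
	r := &reconciler{
		desired: map[volumeID]bool{"operator-scripts": true, "etc-swift": true},
		actual:  map[volumeID]bool{},
		mount: func(v volumeID) error {
			if v == "etc-swift" {
				return fmt.Errorf(`configmap "swift-ring-files" not found`)
			}
			return nil
		},
	}
	r.reconcile()
}
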
Feb 02 11:11:31 crc kubenswrapper[4838]: I0202 11:11:31.075319 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cd7kb\" (UniqueName: \"kubernetes.io/projected/fce5584a-406e-424c-af9b-bc1d37d0355c-kube-api-access-cd7kb\") pod \"placement-8c85-account-create-update-zthcx\" (UID: \"fce5584a-406e-424c-af9b-bc1d37d0355c\") " pod="openstack/placement-8c85-account-create-update-zthcx" Feb 02 11:11:31 crc kubenswrapper[4838]: I0202 11:11:31.075393 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3eb955c8-5244-437a-976d-4fb19d5177b2-operator-scripts\") pod \"placement-db-create-sz8sh\" (UID: \"3eb955c8-5244-437a-976d-4fb19d5177b2\") " pod="openstack/placement-db-create-sz8sh" Feb 02 11:11:31 crc kubenswrapper[4838]: I0202 11:11:31.077950 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3eb955c8-5244-437a-976d-4fb19d5177b2-operator-scripts\") pod \"placement-db-create-sz8sh\" (UID: \"3eb955c8-5244-437a-976d-4fb19d5177b2\") " pod="openstack/placement-db-create-sz8sh" Feb 02 11:11:31 crc kubenswrapper[4838]: I0202 11:11:31.078038 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fce5584a-406e-424c-af9b-bc1d37d0355c-operator-scripts\") pod \"placement-8c85-account-create-update-zthcx\" (UID: \"fce5584a-406e-424c-af9b-bc1d37d0355c\") " pod="openstack/placement-8c85-account-create-update-zthcx" Feb 02 11:11:31 crc kubenswrapper[4838]: I0202 11:11:31.093243 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7c9q\" (UniqueName: \"kubernetes.io/projected/3eb955c8-5244-437a-976d-4fb19d5177b2-kube-api-access-h7c9q\") pod \"placement-db-create-sz8sh\" (UID: \"3eb955c8-5244-437a-976d-4fb19d5177b2\") " pod="openstack/placement-db-create-sz8sh" Feb 02 11:11:31 crc kubenswrapper[4838]: I0202 11:11:31.097429 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cd7kb\" (UniqueName: \"kubernetes.io/projected/fce5584a-406e-424c-af9b-bc1d37d0355c-kube-api-access-cd7kb\") pod \"placement-8c85-account-create-update-zthcx\" (UID: \"fce5584a-406e-424c-af9b-bc1d37d0355c\") " pod="openstack/placement-8c85-account-create-update-zthcx" Feb 02 11:11:31 crc kubenswrapper[4838]: I0202 11:11:31.165147 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-sz8sh" Feb 02 11:11:31 crc kubenswrapper[4838]: I0202 11:11:31.172886 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-8c85-account-create-update-zthcx" Feb 02 11:11:31 crc kubenswrapper[4838]: I0202 11:11:31.184211 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-v77l6" event={"ID":"646dcf2a-3e0a-4470-8950-40543c320e6f","Type":"ContainerStarted","Data":"9213026ee7acb986896eb0310d7f2e3cc1dd16e44607d8b4bbfe030fc2679c90"} Feb 02 11:11:31 crc kubenswrapper[4838]: I0202 11:11:31.201178 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-dc9m7" Feb 02 11:11:31 crc kubenswrapper[4838]: I0202 11:11:31.212497 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-94f8-account-create-update-fcsh8"] Feb 02 11:11:31 crc kubenswrapper[4838]: I0202 11:11:31.217400 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-v77l6" podStartSLOduration=9.217385382 podStartE2EDuration="9.217385382s" podCreationTimestamp="2026-02-02 11:11:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:11:31.215491232 +0000 UTC m=+1085.552592260" watchObservedRunningTime="2026-02-02 11:11:31.217385382 +0000 UTC m=+1085.554486410" Feb 02 11:11:31 crc kubenswrapper[4838]: W0202 11:11:31.231946 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod10c3ebde_9586_4021_8295_261f4305e897.slice/crio-38142d820a09eaca4d59be1609d519e25e05f5793eff411ee85a97d58791a1a1 WatchSource:0}: Error finding container 38142d820a09eaca4d59be1609d519e25e05f5793eff411ee85a97d58791a1a1: Status 404 returned error can't find the container with id 38142d820a09eaca4d59be1609d519e25e05f5793eff411ee85a97d58791a1a1 Feb 02 11:11:31 crc kubenswrapper[4838]: I0202 11:11:31.457116 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-sz8sh"] Feb 02 11:11:31 crc kubenswrapper[4838]: W0202 11:11:31.463951 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3eb955c8_5244_437a_976d_4fb19d5177b2.slice/crio-83cc9d1addabba012e60c366749bcc8a371896590ff8f9b68223e2ea967afa68 WatchSource:0}: Error finding container 83cc9d1addabba012e60c366749bcc8a371896590ff8f9b68223e2ea967afa68: Status 404 returned error can't find the container with id 83cc9d1addabba012e60c366749bcc8a371896590ff8f9b68223e2ea967afa68 Feb 02 11:11:31 crc kubenswrapper[4838]: I0202 11:11:31.717051 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-8c85-account-create-update-zthcx"] Feb 02 11:11:31 crc kubenswrapper[4838]: I0202 11:11:31.821267 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-dc9m7"] Feb 02 11:11:32 crc kubenswrapper[4838]: I0202 11:11:31.999726 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/12e53950-9847-46b2-a51a-1fac5b690098-etc-swift\") pod \"swift-storage-0\" (UID: \"12e53950-9847-46b2-a51a-1fac5b690098\") " pod="openstack/swift-storage-0" Feb 02 11:11:32 crc kubenswrapper[4838]: E0202 11:11:31.999904 4838 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 02 11:11:32 crc kubenswrapper[4838]: E0202 11:11:31.999931 4838 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 02 11:11:32 crc kubenswrapper[4838]: E0202 11:11:31.999987 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/12e53950-9847-46b2-a51a-1fac5b690098-etc-swift podName:12e53950-9847-46b2-a51a-1fac5b690098 nodeName:}" failed. No retries permitted until 2026-02-02 11:11:39.999968508 +0000 UTC m=+1094.337069536 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/12e53950-9847-46b2-a51a-1fac5b690098-etc-swift") pod "swift-storage-0" (UID: "12e53950-9847-46b2-a51a-1fac5b690098") : configmap "swift-ring-files" not found
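
The durationBeforeRetry values for the etc-swift volume double on every consecutive failure across this log: 500ms, 1s, 2s, 4s, and now 8s, each paired with a "No retries permitted until" deadline. (Note also the dnsmasq latency entry just above, where firstStartedPulling and lastFinishedPulling are "0001-01-01 00:00:00 +0000 UTC", Go's zero time.Time, logged when no image pull was needed, which is why its SLO and E2E durations coincide.) A standalone sketch of the doubling schedule; the 500ms base matches the log, while the cap is an assumption modeled on kubelet's exponential backoff, which stops growing after roughly two minutes:

package main

import (
	"fmt"
	"time"
)

// Doubling retry delays as seen above for the etc-swift volume:
// 500ms -> 1s -> 2s -> 4s -> 8s -> ...
// baseDelay matches the log; maxDelay is an assumed cap.
const (
	baseDelay = 500 * time.Millisecond
	maxDelay  = 2*time.Minute + 2*time.Second
)

// delayForAttempt doubles the delay once per prior consecutive failure.
func delayForAttempt(failures int) time.Duration {
	d := baseDelay << failures
	if d > maxDelay || d <= 0 { // also guards against shift overflow
		return maxDelay
	}
	return d
}

func main() {
	for i := 0; i < 6; i++ {
		fmt.Printf("failure %d: retry in %v\n", i+1, delayForAttempt(i))
	}
}
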
Feb 02 11:11:32 crc kubenswrapper[4838]: I0202 11:11:32.191906 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-sz8sh" event={"ID":"3eb955c8-5244-437a-976d-4fb19d5177b2","Type":"ContainerStarted","Data":"83cc9d1addabba012e60c366749bcc8a371896590ff8f9b68223e2ea967afa68"} Feb 02 11:11:32 crc kubenswrapper[4838]: I0202 11:11:32.193285 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-94f8-account-create-update-fcsh8" event={"ID":"10c3ebde-9586-4021-8295-261f4305e897","Type":"ContainerStarted","Data":"811fa5ec02e5af3a07eed85a8bf177fec1894f8fc75301b3a3a5ef3e9f5a0002"} Feb 02 11:11:32 crc kubenswrapper[4838]: I0202 11:11:32.193345 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-94f8-account-create-update-fcsh8" event={"ID":"10c3ebde-9586-4021-8295-261f4305e897","Type":"ContainerStarted","Data":"38142d820a09eaca4d59be1609d519e25e05f5793eff411ee85a97d58791a1a1"} Feb 02 11:11:32 crc kubenswrapper[4838]: I0202 11:11:32.193661 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-v77l6" Feb 02 11:11:32 crc kubenswrapper[4838]: I0202 11:11:32.220206 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-94f8-account-create-update-fcsh8" podStartSLOduration=2.220186965 podStartE2EDuration="2.220186965s" podCreationTimestamp="2026-02-02 11:11:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:11:32.211962398 +0000 UTC m=+1086.549063436" watchObservedRunningTime="2026-02-02 11:11:32.220186965 +0000 UTC m=+1086.557287993" Feb 02 11:11:34 crc kubenswrapper[4838]: W0202 11:11:34.122784 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfce5584a_406e_424c_af9b_bc1d37d0355c.slice/crio-6d5917ddb4d42ba1e3d412858d38b1d7699f205487727b418fd3d5c126efdfa8 WatchSource:0}: Error finding container 6d5917ddb4d42ba1e3d412858d38b1d7699f205487727b418fd3d5c126efdfa8: Status 404 returned error can't find the container with id 6d5917ddb4d42ba1e3d412858d38b1d7699f205487727b418fd3d5c126efdfa8 Feb 02 11:11:34 crc kubenswrapper[4838]: W0202 11:11:34.125065 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6fe795b0_4a13_4de2_ae9e_67dfdf6ee5dc.slice/crio-167338f61868a34d445ed60aad3c18e63ec0f31e3a608b31c1bb247f18128b63 WatchSource:0}: Error finding container 167338f61868a34d445ed60aad3c18e63ec0f31e3a608b31c1bb247f18128b63: Status 404 returned error can't find the container with id 167338f61868a34d445ed60aad3c18e63ec0f31e3a608b31c1bb247f18128b63 Feb 02 11:11:34 crc kubenswrapper[4838]: I0202 11:11:34.212276 4838 generic.go:334] "Generic (PLEG): container finished" podID="10c3ebde-9586-4021-8295-261f4305e897" containerID="811fa5ec02e5af3a07eed85a8bf177fec1894f8fc75301b3a3a5ef3e9f5a0002" exitCode=0 Feb 02 11:11:34 crc kubenswrapper[4838]: I0202 11:11:34.212352 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-94f8-account-create-update-fcsh8"
event={"ID":"10c3ebde-9586-4021-8295-261f4305e897","Type":"ContainerDied","Data":"811fa5ec02e5af3a07eed85a8bf177fec1894f8fc75301b3a3a5ef3e9f5a0002"} Feb 02 11:11:34 crc kubenswrapper[4838]: I0202 11:11:34.213774 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8c85-account-create-update-zthcx" event={"ID":"fce5584a-406e-424c-af9b-bc1d37d0355c","Type":"ContainerStarted","Data":"6d5917ddb4d42ba1e3d412858d38b1d7699f205487727b418fd3d5c126efdfa8"} Feb 02 11:11:34 crc kubenswrapper[4838]: I0202 11:11:34.215667 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-dc9m7" event={"ID":"6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc","Type":"ContainerStarted","Data":"167338f61868a34d445ed60aad3c18e63ec0f31e3a608b31c1bb247f18128b63"} Feb 02 11:11:35 crc kubenswrapper[4838]: I0202 11:11:35.226698 4838 generic.go:334] "Generic (PLEG): container finished" podID="fce5584a-406e-424c-af9b-bc1d37d0355c" containerID="3dd7551e32793babb9d09a22aba1f3fe39d3a9f1b36d4d9d2acd43ed93eaa54f" exitCode=0 Feb 02 11:11:35 crc kubenswrapper[4838]: I0202 11:11:35.226908 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8c85-account-create-update-zthcx" event={"ID":"fce5584a-406e-424c-af9b-bc1d37d0355c","Type":"ContainerDied","Data":"3dd7551e32793babb9d09a22aba1f3fe39d3a9f1b36d4d9d2acd43ed93eaa54f"} Feb 02 11:11:35 crc kubenswrapper[4838]: I0202 11:11:35.229491 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-524zj" event={"ID":"582a43e1-d21a-4421-ae28-0eecd147d19e","Type":"ContainerStarted","Data":"f9b38633054e1d3ea7cace5e8e78d194648526d0b6b0f1fbcabe6063d40712e5"} Feb 02 11:11:35 crc kubenswrapper[4838]: I0202 11:11:35.231478 4838 generic.go:334] "Generic (PLEG): container finished" podID="6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc" containerID="b249dd2e7093a34f01285dc32cebbccc3bb5e23b8b74ddb1aa56b79ccd260288" exitCode=0 Feb 02 11:11:35 crc kubenswrapper[4838]: I0202 11:11:35.231516 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-dc9m7" event={"ID":"6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc","Type":"ContainerDied","Data":"b249dd2e7093a34f01285dc32cebbccc3bb5e23b8b74ddb1aa56b79ccd260288"} Feb 02 11:11:35 crc kubenswrapper[4838]: I0202 11:11:35.233162 4838 generic.go:334] "Generic (PLEG): container finished" podID="3eb955c8-5244-437a-976d-4fb19d5177b2" containerID="a78ef2ce03949be516acb257c362be842f477d8e0584df55fdc99a9455daf185" exitCode=0 Feb 02 11:11:35 crc kubenswrapper[4838]: I0202 11:11:35.233244 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-sz8sh" event={"ID":"3eb955c8-5244-437a-976d-4fb19d5177b2","Type":"ContainerDied","Data":"a78ef2ce03949be516acb257c362be842f477d8e0584df55fdc99a9455daf185"} Feb 02 11:11:35 crc kubenswrapper[4838]: I0202 11:11:35.282257 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-524zj" podStartSLOduration=2.105644938 podStartE2EDuration="7.282230945s" podCreationTimestamp="2026-02-02 11:11:28 +0000 UTC" firstStartedPulling="2026-02-02 11:11:29.070629966 +0000 UTC m=+1083.407730994" lastFinishedPulling="2026-02-02 11:11:34.247215973 +0000 UTC m=+1088.584317001" observedRunningTime="2026-02-02 11:11:35.277026067 +0000 UTC m=+1089.614127145" watchObservedRunningTime="2026-02-02 11:11:35.282230945 +0000 UTC m=+1089.619332003" Feb 02 11:11:35 crc kubenswrapper[4838]: I0202 11:11:35.604319 4838 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/keystone-94f8-account-create-update-fcsh8"
Feb 02 11:11:35 crc kubenswrapper[4838]: I0202 11:11:35.659717 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4lpc\" (UniqueName: \"kubernetes.io/projected/10c3ebde-9586-4021-8295-261f4305e897-kube-api-access-x4lpc\") pod \"10c3ebde-9586-4021-8295-261f4305e897\" (UID: \"10c3ebde-9586-4021-8295-261f4305e897\") "
Feb 02 11:11:35 crc kubenswrapper[4838]: I0202 11:11:35.659803 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/10c3ebde-9586-4021-8295-261f4305e897-operator-scripts\") pod \"10c3ebde-9586-4021-8295-261f4305e897\" (UID: \"10c3ebde-9586-4021-8295-261f4305e897\") "
Feb 02 11:11:35 crc kubenswrapper[4838]: I0202 11:11:35.660262 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10c3ebde-9586-4021-8295-261f4305e897-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "10c3ebde-9586-4021-8295-261f4305e897" (UID: "10c3ebde-9586-4021-8295-261f4305e897"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:11:35 crc kubenswrapper[4838]: I0202 11:11:35.660747 4838 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/10c3ebde-9586-4021-8295-261f4305e897-operator-scripts\") on node \"crc\" DevicePath \"\""
Feb 02 11:11:35 crc kubenswrapper[4838]: I0202 11:11:35.665318 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10c3ebde-9586-4021-8295-261f4305e897-kube-api-access-x4lpc" (OuterVolumeSpecName: "kube-api-access-x4lpc") pod "10c3ebde-9586-4021-8295-261f4305e897" (UID: "10c3ebde-9586-4021-8295-261f4305e897"). InnerVolumeSpecName "kube-api-access-x4lpc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:11:35 crc kubenswrapper[4838]: I0202 11:11:35.761874 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4lpc\" (UniqueName: \"kubernetes.io/projected/10c3ebde-9586-4021-8295-261f4305e897-kube-api-access-x4lpc\") on node \"crc\" DevicePath \"\""
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.241013 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-94f8-account-create-update-fcsh8"
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.243791 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-94f8-account-create-update-fcsh8" event={"ID":"10c3ebde-9586-4021-8295-261f4305e897","Type":"ContainerDied","Data":"38142d820a09eaca4d59be1609d519e25e05f5793eff411ee85a97d58791a1a1"}
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.243919 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="38142d820a09eaca4d59be1609d519e25e05f5793eff411ee85a97d58791a1a1"
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.308910 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-vl4tb"]
Feb 02 11:11:36 crc kubenswrapper[4838]: E0202 11:11:36.309414 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10c3ebde-9586-4021-8295-261f4305e897" containerName="mariadb-account-create-update"
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.309438 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="10c3ebde-9586-4021-8295-261f4305e897" containerName="mariadb-account-create-update"
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.309695 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="10c3ebde-9586-4021-8295-261f4305e897" containerName="mariadb-account-create-update"
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.310395 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-vl4tb"
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.315277 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-vl4tb"]
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.374686 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c723ca1-2908-4fb1-820a-c440fbf6616c-operator-scripts\") pod \"glance-db-create-vl4tb\" (UID: \"3c723ca1-2908-4fb1-820a-c440fbf6616c\") " pod="openstack/glance-db-create-vl4tb"
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.374745 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2scf\" (UniqueName: \"kubernetes.io/projected/3c723ca1-2908-4fb1-820a-c440fbf6616c-kube-api-access-j2scf\") pod \"glance-db-create-vl4tb\" (UID: \"3c723ca1-2908-4fb1-820a-c440fbf6616c\") " pod="openstack/glance-db-create-vl4tb"
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.399751 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-5e8e-account-create-update-55tw9"]
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.401258 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-5e8e-account-create-update-55tw9"
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.405328 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret"
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.411917 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-5e8e-account-create-update-55tw9"]
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.476375 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c723ca1-2908-4fb1-820a-c440fbf6616c-operator-scripts\") pod \"glance-db-create-vl4tb\" (UID: \"3c723ca1-2908-4fb1-820a-c440fbf6616c\") " pod="openstack/glance-db-create-vl4tb"
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.477020 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c723ca1-2908-4fb1-820a-c440fbf6616c-operator-scripts\") pod \"glance-db-create-vl4tb\" (UID: \"3c723ca1-2908-4fb1-820a-c440fbf6616c\") " pod="openstack/glance-db-create-vl4tb"
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.477091 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2scf\" (UniqueName: \"kubernetes.io/projected/3c723ca1-2908-4fb1-820a-c440fbf6616c-kube-api-access-j2scf\") pod \"glance-db-create-vl4tb\" (UID: \"3c723ca1-2908-4fb1-820a-c440fbf6616c\") " pod="openstack/glance-db-create-vl4tb"
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.477781 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1eed330b-d64c-42c0-a440-736df7d0d861-operator-scripts\") pod \"glance-5e8e-account-create-update-55tw9\" (UID: \"1eed330b-d64c-42c0-a440-736df7d0d861\") " pod="openstack/glance-5e8e-account-create-update-55tw9"
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.477895 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bnfk\" (UniqueName: \"kubernetes.io/projected/1eed330b-d64c-42c0-a440-736df7d0d861-kube-api-access-9bnfk\") pod \"glance-5e8e-account-create-update-55tw9\" (UID: \"1eed330b-d64c-42c0-a440-736df7d0d861\") " pod="openstack/glance-5e8e-account-create-update-55tw9"
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.498970 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2scf\" (UniqueName: \"kubernetes.io/projected/3c723ca1-2908-4fb1-820a-c440fbf6616c-kube-api-access-j2scf\") pod \"glance-db-create-vl4tb\" (UID: \"3c723ca1-2908-4fb1-820a-c440fbf6616c\") " pod="openstack/glance-db-create-vl4tb"
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.580206 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1eed330b-d64c-42c0-a440-736df7d0d861-operator-scripts\") pod \"glance-5e8e-account-create-update-55tw9\" (UID: \"1eed330b-d64c-42c0-a440-736df7d0d861\") " pod="openstack/glance-5e8e-account-create-update-55tw9"
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.580280 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bnfk\" (UniqueName: \"kubernetes.io/projected/1eed330b-d64c-42c0-a440-736df7d0d861-kube-api-access-9bnfk\") pod \"glance-5e8e-account-create-update-55tw9\" (UID: \"1eed330b-d64c-42c0-a440-736df7d0d861\") " pod="openstack/glance-5e8e-account-create-update-55tw9"
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.583351 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1eed330b-d64c-42c0-a440-736df7d0d861-operator-scripts\") pod \"glance-5e8e-account-create-update-55tw9\" (UID: \"1eed330b-d64c-42c0-a440-736df7d0d861\") " pod="openstack/glance-5e8e-account-create-update-55tw9"
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.600896 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bnfk\" (UniqueName: \"kubernetes.io/projected/1eed330b-d64c-42c0-a440-736df7d0d861-kube-api-access-9bnfk\") pod \"glance-5e8e-account-create-update-55tw9\" (UID: \"1eed330b-d64c-42c0-a440-736df7d0d861\") " pod="openstack/glance-5e8e-account-create-update-55tw9"
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.653472 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-vl4tb"
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.718712 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-5e8e-account-create-update-55tw9"
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.807596 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-dc9m7"
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.824599 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-8c85-account-create-update-zthcx"
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.842352 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-sz8sh"
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.887219 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3eb955c8-5244-437a-976d-4fb19d5177b2-operator-scripts\") pod \"3eb955c8-5244-437a-976d-4fb19d5177b2\" (UID: \"3eb955c8-5244-437a-976d-4fb19d5177b2\") "
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.887329 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cd7kb\" (UniqueName: \"kubernetes.io/projected/fce5584a-406e-424c-af9b-bc1d37d0355c-kube-api-access-cd7kb\") pod \"fce5584a-406e-424c-af9b-bc1d37d0355c\" (UID: \"fce5584a-406e-424c-af9b-bc1d37d0355c\") "
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.887362 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46q2n\" (UniqueName: \"kubernetes.io/projected/6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc-kube-api-access-46q2n\") pod \"6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc\" (UID: \"6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc\") "
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.887377 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7c9q\" (UniqueName: \"kubernetes.io/projected/3eb955c8-5244-437a-976d-4fb19d5177b2-kube-api-access-h7c9q\") pod \"3eb955c8-5244-437a-976d-4fb19d5177b2\" (UID: \"3eb955c8-5244-437a-976d-4fb19d5177b2\") "
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.887437 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc-operator-scripts\") pod \"6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc\" (UID: \"6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc\") "
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.887454 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fce5584a-406e-424c-af9b-bc1d37d0355c-operator-scripts\") pod \"fce5584a-406e-424c-af9b-bc1d37d0355c\" (UID: \"fce5584a-406e-424c-af9b-bc1d37d0355c\") "
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.888025 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3eb955c8-5244-437a-976d-4fb19d5177b2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3eb955c8-5244-437a-976d-4fb19d5177b2" (UID: "3eb955c8-5244-437a-976d-4fb19d5177b2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.888120 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fce5584a-406e-424c-af9b-bc1d37d0355c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fce5584a-406e-424c-af9b-bc1d37d0355c" (UID: "fce5584a-406e-424c-af9b-bc1d37d0355c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.888498 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc" (UID: "6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.892258 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc-kube-api-access-46q2n" (OuterVolumeSpecName: "kube-api-access-46q2n") pod "6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc" (UID: "6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc"). InnerVolumeSpecName "kube-api-access-46q2n". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.894584 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fce5584a-406e-424c-af9b-bc1d37d0355c-kube-api-access-cd7kb" (OuterVolumeSpecName: "kube-api-access-cd7kb") pod "fce5584a-406e-424c-af9b-bc1d37d0355c" (UID: "fce5584a-406e-424c-af9b-bc1d37d0355c"). InnerVolumeSpecName "kube-api-access-cd7kb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.896736 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3eb955c8-5244-437a-976d-4fb19d5177b2-kube-api-access-h7c9q" (OuterVolumeSpecName: "kube-api-access-h7c9q") pod "3eb955c8-5244-437a-976d-4fb19d5177b2" (UID: "3eb955c8-5244-437a-976d-4fb19d5177b2"). InnerVolumeSpecName "kube-api-access-h7c9q". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.989292 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46q2n\" (UniqueName: \"kubernetes.io/projected/6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc-kube-api-access-46q2n\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.989329 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7c9q\" (UniqueName: \"kubernetes.io/projected/3eb955c8-5244-437a-976d-4fb19d5177b2-kube-api-access-h7c9q\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.989343 4838 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.989355 4838 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fce5584a-406e-424c-af9b-bc1d37d0355c-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.989367 4838 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3eb955c8-5244-437a-976d-4fb19d5177b2-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:36 crc kubenswrapper[4838]: I0202 11:11:36.989381 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cd7kb\" (UniqueName: \"kubernetes.io/projected/fce5584a-406e-424c-af9b-bc1d37d0355c-kube-api-access-cd7kb\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.100167 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-vl4tb"] Feb 02 11:11:37 crc kubenswrapper[4838]: W0202 11:11:37.106574 4838 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c723ca1_2908_4fb1_820a_c440fbf6616c.slice/crio-f8e6ca73e4d5375273abccb7db03f59d6363584a0a835648004b0baeb2301ea9 WatchSource:0}: Error finding container f8e6ca73e4d5375273abccb7db03f59d6363584a0a835648004b0baeb2301ea9: Status 404 returned error can't find the container with id f8e6ca73e4d5375273abccb7db03f59d6363584a0a835648004b0baeb2301ea9 Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.199241 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-5e8e-account-create-update-55tw9"] Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.249680 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-vl4tb" event={"ID":"3c723ca1-2908-4fb1-820a-c440fbf6616c","Type":"ContainerStarted","Data":"8bc3328f33c16630f050f784999fade725650a89298eb28f8fb690df5e4cdfb6"} Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.250777 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-vl4tb" event={"ID":"3c723ca1-2908-4fb1-820a-c440fbf6616c","Type":"ContainerStarted","Data":"f8e6ca73e4d5375273abccb7db03f59d6363584a0a835648004b0baeb2301ea9"} Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.252261 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-dc9m7" event={"ID":"6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc","Type":"ContainerDied","Data":"167338f61868a34d445ed60aad3c18e63ec0f31e3a608b31c1bb247f18128b63"} Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.252364 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="167338f61868a34d445ed60aad3c18e63ec0f31e3a608b31c1bb247f18128b63" Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.252465 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-dc9m7" Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.255105 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-sz8sh" event={"ID":"3eb955c8-5244-437a-976d-4fb19d5177b2","Type":"ContainerDied","Data":"83cc9d1addabba012e60c366749bcc8a371896590ff8f9b68223e2ea967afa68"} Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.255149 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="83cc9d1addabba012e60c366749bcc8a371896590ff8f9b68223e2ea967afa68" Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.255216 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-sz8sh" Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.269303 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8c85-account-create-update-zthcx" event={"ID":"fce5584a-406e-424c-af9b-bc1d37d0355c","Type":"ContainerDied","Data":"6d5917ddb4d42ba1e3d412858d38b1d7699f205487727b418fd3d5c126efdfa8"} Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.269845 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-8c85-account-create-update-zthcx" Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.270401 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6d5917ddb4d42ba1e3d412858d38b1d7699f205487727b418fd3d5c126efdfa8" Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.271815 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-create-vl4tb" podStartSLOduration=1.271805125 podStartE2EDuration="1.271805125s" podCreationTimestamp="2026-02-02 11:11:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:11:37.268801506 +0000 UTC m=+1091.605902544" watchObservedRunningTime="2026-02-02 11:11:37.271805125 +0000 UTC m=+1091.608906153" Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.274780 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-5e8e-account-create-update-55tw9" event={"ID":"1eed330b-d64c-42c0-a440-736df7d0d861","Type":"ContainerStarted","Data":"27b08ba410e1bd9b68f9c2458d84d01365ccb10bc078f122a379cd1f0c126d41"} Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.673658 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-c42pv"] Feb 02 11:11:37 crc kubenswrapper[4838]: E0202 11:11:37.674140 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fce5584a-406e-424c-af9b-bc1d37d0355c" containerName="mariadb-account-create-update" Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.674163 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="fce5584a-406e-424c-af9b-bc1d37d0355c" containerName="mariadb-account-create-update" Feb 02 11:11:37 crc kubenswrapper[4838]: E0202 11:11:37.674180 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc" containerName="mariadb-database-create" Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.674186 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc" containerName="mariadb-database-create" Feb 02 11:11:37 crc kubenswrapper[4838]: E0202 11:11:37.674212 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3eb955c8-5244-437a-976d-4fb19d5177b2" containerName="mariadb-database-create" Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.674218 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="3eb955c8-5244-437a-976d-4fb19d5177b2" containerName="mariadb-database-create" Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.674366 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="fce5584a-406e-424c-af9b-bc1d37d0355c" containerName="mariadb-account-create-update" Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.674384 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc" containerName="mariadb-database-create" Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.674397 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="3eb955c8-5244-437a-976d-4fb19d5177b2" containerName="mariadb-database-create" Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.675067 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-c42pv" Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.677680 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.683969 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-c42pv"] Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.702193 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ffeae1a4-9be7-4108-a0d7-3ff4f54eb669-operator-scripts\") pod \"root-account-create-update-c42pv\" (UID: \"ffeae1a4-9be7-4108-a0d7-3ff4f54eb669\") " pod="openstack/root-account-create-update-c42pv" Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.702553 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8f6gc\" (UniqueName: \"kubernetes.io/projected/ffeae1a4-9be7-4108-a0d7-3ff4f54eb669-kube-api-access-8f6gc\") pod \"root-account-create-update-c42pv\" (UID: \"ffeae1a4-9be7-4108-a0d7-3ff4f54eb669\") " pod="openstack/root-account-create-update-c42pv" Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.804340 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ffeae1a4-9be7-4108-a0d7-3ff4f54eb669-operator-scripts\") pod \"root-account-create-update-c42pv\" (UID: \"ffeae1a4-9be7-4108-a0d7-3ff4f54eb669\") " pod="openstack/root-account-create-update-c42pv" Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.804407 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8f6gc\" (UniqueName: \"kubernetes.io/projected/ffeae1a4-9be7-4108-a0d7-3ff4f54eb669-kube-api-access-8f6gc\") pod \"root-account-create-update-c42pv\" (UID: \"ffeae1a4-9be7-4108-a0d7-3ff4f54eb669\") " pod="openstack/root-account-create-update-c42pv" Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.805131 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ffeae1a4-9be7-4108-a0d7-3ff4f54eb669-operator-scripts\") pod \"root-account-create-update-c42pv\" (UID: \"ffeae1a4-9be7-4108-a0d7-3ff4f54eb669\") " pod="openstack/root-account-create-update-c42pv" Feb 02 11:11:37 crc kubenswrapper[4838]: I0202 11:11:37.821729 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8f6gc\" (UniqueName: \"kubernetes.io/projected/ffeae1a4-9be7-4108-a0d7-3ff4f54eb669-kube-api-access-8f6gc\") pod \"root-account-create-update-c42pv\" (UID: \"ffeae1a4-9be7-4108-a0d7-3ff4f54eb669\") " pod="openstack/root-account-create-update-c42pv" Feb 02 11:11:38 crc kubenswrapper[4838]: I0202 11:11:38.006645 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-c42pv" Feb 02 11:11:38 crc kubenswrapper[4838]: I0202 11:11:38.298024 4838 generic.go:334] "Generic (PLEG): container finished" podID="1eed330b-d64c-42c0-a440-736df7d0d861" containerID="afb6f33e8b1bcc250864e5db117f04acf89cad03722c422bbe6e36d83867aac2" exitCode=0 Feb 02 11:11:38 crc kubenswrapper[4838]: I0202 11:11:38.298080 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-5e8e-account-create-update-55tw9" event={"ID":"1eed330b-d64c-42c0-a440-736df7d0d861","Type":"ContainerDied","Data":"afb6f33e8b1bcc250864e5db117f04acf89cad03722c422bbe6e36d83867aac2"} Feb 02 11:11:38 crc kubenswrapper[4838]: I0202 11:11:38.299291 4838 generic.go:334] "Generic (PLEG): container finished" podID="3c723ca1-2908-4fb1-820a-c440fbf6616c" containerID="8bc3328f33c16630f050f784999fade725650a89298eb28f8fb690df5e4cdfb6" exitCode=0 Feb 02 11:11:38 crc kubenswrapper[4838]: I0202 11:11:38.299312 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-vl4tb" event={"ID":"3c723ca1-2908-4fb1-820a-c440fbf6616c","Type":"ContainerDied","Data":"8bc3328f33c16630f050f784999fade725650a89298eb28f8fb690df5e4cdfb6"} Feb 02 11:11:38 crc kubenswrapper[4838]: I0202 11:11:38.566943 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-v77l6" Feb 02 11:11:38 crc kubenswrapper[4838]: I0202 11:11:38.574602 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-c42pv"] Feb 02 11:11:38 crc kubenswrapper[4838]: I0202 11:11:38.660742 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-zhs6w"] Feb 02 11:11:38 crc kubenswrapper[4838]: I0202 11:11:38.661104 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86db49b7ff-zhs6w" podUID="a321e834-3079-4d1d-b223-08c4ce184d81" containerName="dnsmasq-dns" containerID="cri-o://0fb71b06c761edfcd0b4055c79cd9d1bd826ac3c32468a35a80a1df6b132818b" gracePeriod=10 Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.142274 4838 util.go:48] "No ready sandbox for pod can be found. 
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.230823 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a321e834-3079-4d1d-b223-08c4ce184d81-config\") pod \"a321e834-3079-4d1d-b223-08c4ce184d81\" (UID: \"a321e834-3079-4d1d-b223-08c4ce184d81\") "
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.230950 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6q6jl\" (UniqueName: \"kubernetes.io/projected/a321e834-3079-4d1d-b223-08c4ce184d81-kube-api-access-6q6jl\") pod \"a321e834-3079-4d1d-b223-08c4ce184d81\" (UID: \"a321e834-3079-4d1d-b223-08c4ce184d81\") "
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.230982 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a321e834-3079-4d1d-b223-08c4ce184d81-ovsdbserver-sb\") pod \"a321e834-3079-4d1d-b223-08c4ce184d81\" (UID: \"a321e834-3079-4d1d-b223-08c4ce184d81\") "
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.231418 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a321e834-3079-4d1d-b223-08c4ce184d81-dns-svc\") pod \"a321e834-3079-4d1d-b223-08c4ce184d81\" (UID: \"a321e834-3079-4d1d-b223-08c4ce184d81\") "
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.231443 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a321e834-3079-4d1d-b223-08c4ce184d81-ovsdbserver-nb\") pod \"a321e834-3079-4d1d-b223-08c4ce184d81\" (UID: \"a321e834-3079-4d1d-b223-08c4ce184d81\") "
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.236612 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a321e834-3079-4d1d-b223-08c4ce184d81-kube-api-access-6q6jl" (OuterVolumeSpecName: "kube-api-access-6q6jl") pod "a321e834-3079-4d1d-b223-08c4ce184d81" (UID: "a321e834-3079-4d1d-b223-08c4ce184d81"). InnerVolumeSpecName "kube-api-access-6q6jl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.275891 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a321e834-3079-4d1d-b223-08c4ce184d81-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a321e834-3079-4d1d-b223-08c4ce184d81" (UID: "a321e834-3079-4d1d-b223-08c4ce184d81"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.279452 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a321e834-3079-4d1d-b223-08c4ce184d81-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a321e834-3079-4d1d-b223-08c4ce184d81" (UID: "a321e834-3079-4d1d-b223-08c4ce184d81"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.285479 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a321e834-3079-4d1d-b223-08c4ce184d81-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a321e834-3079-4d1d-b223-08c4ce184d81" (UID: "a321e834-3079-4d1d-b223-08c4ce184d81"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.287422 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a321e834-3079-4d1d-b223-08c4ce184d81-config" (OuterVolumeSpecName: "config") pod "a321e834-3079-4d1d-b223-08c4ce184d81" (UID: "a321e834-3079-4d1d-b223-08c4ce184d81"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.312135 4838 generic.go:334] "Generic (PLEG): container finished" podID="a321e834-3079-4d1d-b223-08c4ce184d81" containerID="0fb71b06c761edfcd0b4055c79cd9d1bd826ac3c32468a35a80a1df6b132818b" exitCode=0
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.312256 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-zhs6w"
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.312273 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-zhs6w" event={"ID":"a321e834-3079-4d1d-b223-08c4ce184d81","Type":"ContainerDied","Data":"0fb71b06c761edfcd0b4055c79cd9d1bd826ac3c32468a35a80a1df6b132818b"}
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.312504 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-zhs6w" event={"ID":"a321e834-3079-4d1d-b223-08c4ce184d81","Type":"ContainerDied","Data":"40d34f35eb847980e2e2aa065aa4a4ead3c36b14ba16e51446d6dcf2b664763a"}
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.312537 4838 scope.go:117] "RemoveContainer" containerID="0fb71b06c761edfcd0b4055c79cd9d1bd826ac3c32468a35a80a1df6b132818b"
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.321083 4838 generic.go:334] "Generic (PLEG): container finished" podID="ffeae1a4-9be7-4108-a0d7-3ff4f54eb669" containerID="ee21caf870be62380ef54f8dab0bbebb35d85ef7d0f39eeb969c057a9fbad71f" exitCode=0
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.321230 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-c42pv" event={"ID":"ffeae1a4-9be7-4108-a0d7-3ff4f54eb669","Type":"ContainerDied","Data":"ee21caf870be62380ef54f8dab0bbebb35d85ef7d0f39eeb969c057a9fbad71f"}
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.321322 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-c42pv" event={"ID":"ffeae1a4-9be7-4108-a0d7-3ff4f54eb669","Type":"ContainerStarted","Data":"3813f064f7aac3097aa3d7c6661f342e0f5b7dcb281a2ead0cb4e968d7d5f4a9"}
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.342712 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a321e834-3079-4d1d-b223-08c4ce184d81-config\") on node \"crc\" DevicePath \"\""
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.342743 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6q6jl\" (UniqueName: \"kubernetes.io/projected/a321e834-3079-4d1d-b223-08c4ce184d81-kube-api-access-6q6jl\") on node \"crc\" DevicePath \"\""
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.342756 4838 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a321e834-3079-4d1d-b223-08c4ce184d81-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.342765 4838 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a321e834-3079-4d1d-b223-08c4ce184d81-dns-svc\") on node \"crc\" DevicePath \"\""
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.342775 4838 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a321e834-3079-4d1d-b223-08c4ce184d81-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.353938 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-zhs6w"]
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.355351 4838 scope.go:117] "RemoveContainer" containerID="338900de346508d9b483091abf67b1b8929c00ba58902bff130a5ad5765bfd93"
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.360758 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-zhs6w"]
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.373599 4838 scope.go:117] "RemoveContainer" containerID="0fb71b06c761edfcd0b4055c79cd9d1bd826ac3c32468a35a80a1df6b132818b"
Feb 02 11:11:39 crc kubenswrapper[4838]: E0202 11:11:39.374161 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0fb71b06c761edfcd0b4055c79cd9d1bd826ac3c32468a35a80a1df6b132818b\": container with ID starting with 0fb71b06c761edfcd0b4055c79cd9d1bd826ac3c32468a35a80a1df6b132818b not found: ID does not exist" containerID="0fb71b06c761edfcd0b4055c79cd9d1bd826ac3c32468a35a80a1df6b132818b"
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.374213 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0fb71b06c761edfcd0b4055c79cd9d1bd826ac3c32468a35a80a1df6b132818b"} err="failed to get container status \"0fb71b06c761edfcd0b4055c79cd9d1bd826ac3c32468a35a80a1df6b132818b\": rpc error: code = NotFound desc = could not find container \"0fb71b06c761edfcd0b4055c79cd9d1bd826ac3c32468a35a80a1df6b132818b\": container with ID starting with 0fb71b06c761edfcd0b4055c79cd9d1bd826ac3c32468a35a80a1df6b132818b not found: ID does not exist"
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.374247 4838 scope.go:117] "RemoveContainer" containerID="338900de346508d9b483091abf67b1b8929c00ba58902bff130a5ad5765bfd93"
Feb 02 11:11:39 crc kubenswrapper[4838]: E0202 11:11:39.374740 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"338900de346508d9b483091abf67b1b8929c00ba58902bff130a5ad5765bfd93\": container with ID starting with 338900de346508d9b483091abf67b1b8929c00ba58902bff130a5ad5765bfd93 not found: ID does not exist" containerID="338900de346508d9b483091abf67b1b8929c00ba58902bff130a5ad5765bfd93"
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.374785 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"338900de346508d9b483091abf67b1b8929c00ba58902bff130a5ad5765bfd93"} err="failed to get container status \"338900de346508d9b483091abf67b1b8929c00ba58902bff130a5ad5765bfd93\": rpc error: code = NotFound desc = could not find container \"338900de346508d9b483091abf67b1b8929c00ba58902bff130a5ad5765bfd93\": container with ID starting with 338900de346508d9b483091abf67b1b8929c00ba58902bff130a5ad5765bfd93 not found: ID does not exist"
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.651406 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-5e8e-account-create-update-55tw9"
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.750315 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9bnfk\" (UniqueName: \"kubernetes.io/projected/1eed330b-d64c-42c0-a440-736df7d0d861-kube-api-access-9bnfk\") pod \"1eed330b-d64c-42c0-a440-736df7d0d861\" (UID: \"1eed330b-d64c-42c0-a440-736df7d0d861\") "
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.750374 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1eed330b-d64c-42c0-a440-736df7d0d861-operator-scripts\") pod \"1eed330b-d64c-42c0-a440-736df7d0d861\" (UID: \"1eed330b-d64c-42c0-a440-736df7d0d861\") "
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.751330 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1eed330b-d64c-42c0-a440-736df7d0d861-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1eed330b-d64c-42c0-a440-736df7d0d861" (UID: "1eed330b-d64c-42c0-a440-736df7d0d861"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.755155 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1eed330b-d64c-42c0-a440-736df7d0d861-kube-api-access-9bnfk" (OuterVolumeSpecName: "kube-api-access-9bnfk") pod "1eed330b-d64c-42c0-a440-736df7d0d861" (UID: "1eed330b-d64c-42c0-a440-736df7d0d861"). InnerVolumeSpecName "kube-api-access-9bnfk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.756867 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-vl4tb"
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.852190 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c723ca1-2908-4fb1-820a-c440fbf6616c-operator-scripts\") pod \"3c723ca1-2908-4fb1-820a-c440fbf6616c\" (UID: \"3c723ca1-2908-4fb1-820a-c440fbf6616c\") "
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.852360 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2scf\" (UniqueName: \"kubernetes.io/projected/3c723ca1-2908-4fb1-820a-c440fbf6616c-kube-api-access-j2scf\") pod \"3c723ca1-2908-4fb1-820a-c440fbf6616c\" (UID: \"3c723ca1-2908-4fb1-820a-c440fbf6616c\") "
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.852899 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9bnfk\" (UniqueName: \"kubernetes.io/projected/1eed330b-d64c-42c0-a440-736df7d0d861-kube-api-access-9bnfk\") on node \"crc\" DevicePath \"\""
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.852931 4838 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1eed330b-d64c-42c0-a440-736df7d0d861-operator-scripts\") on node \"crc\" DevicePath \"\""
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.853792 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c723ca1-2908-4fb1-820a-c440fbf6616c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3c723ca1-2908-4fb1-820a-c440fbf6616c" (UID: "3c723ca1-2908-4fb1-820a-c440fbf6616c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.855964 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c723ca1-2908-4fb1-820a-c440fbf6616c-kube-api-access-j2scf" (OuterVolumeSpecName: "kube-api-access-j2scf") pod "3c723ca1-2908-4fb1-820a-c440fbf6616c" (UID: "3c723ca1-2908-4fb1-820a-c440fbf6616c"). InnerVolumeSpecName "kube-api-access-j2scf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.954251 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2scf\" (UniqueName: \"kubernetes.io/projected/3c723ca1-2908-4fb1-820a-c440fbf6616c-kube-api-access-j2scf\") on node \"crc\" DevicePath \"\""
Feb 02 11:11:39 crc kubenswrapper[4838]: I0202 11:11:39.954283 4838 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c723ca1-2908-4fb1-820a-c440fbf6616c-operator-scripts\") on node \"crc\" DevicePath \"\""
Feb 02 11:11:40 crc kubenswrapper[4838]: I0202 11:11:40.056169 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/12e53950-9847-46b2-a51a-1fac5b690098-etc-swift\") pod \"swift-storage-0\" (UID: \"12e53950-9847-46b2-a51a-1fac5b690098\") " pod="openstack/swift-storage-0"
Feb 02 11:11:40 crc kubenswrapper[4838]: E0202 11:11:40.056365 4838 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Feb 02 11:11:40 crc kubenswrapper[4838]: E0202 11:11:40.056393 4838 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Feb 02 11:11:40 crc kubenswrapper[4838]: E0202 11:11:40.056455 4838 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/12e53950-9847-46b2-a51a-1fac5b690098-etc-swift podName:12e53950-9847-46b2-a51a-1fac5b690098 nodeName:}" failed. No retries permitted until 2026-02-02 11:11:56.05643403 +0000 UTC m=+1110.393535058 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/12e53950-9847-46b2-a51a-1fac5b690098-etc-swift") pod "swift-storage-0" (UID: "12e53950-9847-46b2-a51a-1fac5b690098") : configmap "swift-ring-files" not found
Feb 02 11:11:40 crc kubenswrapper[4838]: I0202 11:11:40.330983 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-5e8e-account-create-update-55tw9" event={"ID":"1eed330b-d64c-42c0-a440-736df7d0d861","Type":"ContainerDied","Data":"27b08ba410e1bd9b68f9c2458d84d01365ccb10bc078f122a379cd1f0c126d41"}
Feb 02 11:11:40 crc kubenswrapper[4838]: I0202 11:11:40.331012 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-5e8e-account-create-update-55tw9"
Feb 02 11:11:40 crc kubenswrapper[4838]: I0202 11:11:40.331025 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27b08ba410e1bd9b68f9c2458d84d01365ccb10bc078f122a379cd1f0c126d41"
Feb 02 11:11:40 crc kubenswrapper[4838]: I0202 11:11:40.334255 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-vl4tb" event={"ID":"3c723ca1-2908-4fb1-820a-c440fbf6616c","Type":"ContainerDied","Data":"f8e6ca73e4d5375273abccb7db03f59d6363584a0a835648004b0baeb2301ea9"}
Feb 02 11:11:40 crc kubenswrapper[4838]: I0202 11:11:40.334294 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f8e6ca73e4d5375273abccb7db03f59d6363584a0a835648004b0baeb2301ea9"
Feb 02 11:11:40 crc kubenswrapper[4838]: I0202 11:11:40.334268 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-vl4tb"
Feb 02 11:11:40 crc kubenswrapper[4838]: I0202 11:11:40.518409 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a321e834-3079-4d1d-b223-08c4ce184d81" path="/var/lib/kubelet/pods/a321e834-3079-4d1d-b223-08c4ce184d81/volumes"
Feb 02 11:11:40 crc kubenswrapper[4838]: I0202 11:11:40.622277 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-c42pv"
Feb 02 11:11:40 crc kubenswrapper[4838]: I0202 11:11:40.665565 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8f6gc\" (UniqueName: \"kubernetes.io/projected/ffeae1a4-9be7-4108-a0d7-3ff4f54eb669-kube-api-access-8f6gc\") pod \"ffeae1a4-9be7-4108-a0d7-3ff4f54eb669\" (UID: \"ffeae1a4-9be7-4108-a0d7-3ff4f54eb669\") "
Feb 02 11:11:40 crc kubenswrapper[4838]: I0202 11:11:40.665726 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ffeae1a4-9be7-4108-a0d7-3ff4f54eb669-operator-scripts\") pod \"ffeae1a4-9be7-4108-a0d7-3ff4f54eb669\" (UID: \"ffeae1a4-9be7-4108-a0d7-3ff4f54eb669\") "
Feb 02 11:11:40 crc kubenswrapper[4838]: I0202 11:11:40.666371 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffeae1a4-9be7-4108-a0d7-3ff4f54eb669-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ffeae1a4-9be7-4108-a0d7-3ff4f54eb669" (UID: "ffeae1a4-9be7-4108-a0d7-3ff4f54eb669"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:11:40 crc kubenswrapper[4838]: I0202 11:11:40.669819 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffeae1a4-9be7-4108-a0d7-3ff4f54eb669-kube-api-access-8f6gc" (OuterVolumeSpecName: "kube-api-access-8f6gc") pod "ffeae1a4-9be7-4108-a0d7-3ff4f54eb669" (UID: "ffeae1a4-9be7-4108-a0d7-3ff4f54eb669"). InnerVolumeSpecName "kube-api-access-8f6gc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:11:40 crc kubenswrapper[4838]: I0202 11:11:40.767685 4838 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ffeae1a4-9be7-4108-a0d7-3ff4f54eb669-operator-scripts\") on node \"crc\" DevicePath \"\""
Feb 02 11:11:40 crc kubenswrapper[4838]: I0202 11:11:40.767949 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8f6gc\" (UniqueName: \"kubernetes.io/projected/ffeae1a4-9be7-4108-a0d7-3ff4f54eb669-kube-api-access-8f6gc\") on node \"crc\" DevicePath \"\""
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.343995 4838 generic.go:334] "Generic (PLEG): container finished" podID="698d5f5d-683c-4130-8d4f-d1d59b5d32e4" containerID="bac490b36b57a0475b02c67e445444a2d8f9bcdd6b9fb515df62f3f92e6d09e1" exitCode=0
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.344169 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"698d5f5d-683c-4130-8d4f-d1d59b5d32e4","Type":"ContainerDied","Data":"bac490b36b57a0475b02c67e445444a2d8f9bcdd6b9fb515df62f3f92e6d09e1"}
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.347994 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-c42pv" event={"ID":"ffeae1a4-9be7-4108-a0d7-3ff4f54eb669","Type":"ContainerDied","Data":"3813f064f7aac3097aa3d7c6661f342e0f5b7dcb281a2ead0cb4e968d7d5f4a9"}
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.348167 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3813f064f7aac3097aa3d7c6661f342e0f5b7dcb281a2ead0cb4e968d7d5f4a9"
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.348431 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-c42pv"
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.612049 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-snvfg"]
Feb 02 11:11:41 crc kubenswrapper[4838]: E0202 11:11:41.612707 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffeae1a4-9be7-4108-a0d7-3ff4f54eb669" containerName="mariadb-account-create-update"
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.612728 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffeae1a4-9be7-4108-a0d7-3ff4f54eb669" containerName="mariadb-account-create-update"
Feb 02 11:11:41 crc kubenswrapper[4838]: E0202 11:11:41.612744 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a321e834-3079-4d1d-b223-08c4ce184d81" containerName="init"
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.612752 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="a321e834-3079-4d1d-b223-08c4ce184d81" containerName="init"
Feb 02 11:11:41 crc kubenswrapper[4838]: E0202 11:11:41.612761 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1eed330b-d64c-42c0-a440-736df7d0d861" containerName="mariadb-account-create-update"
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.612768 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="1eed330b-d64c-42c0-a440-736df7d0d861" containerName="mariadb-account-create-update"
Feb 02 11:11:41 crc kubenswrapper[4838]: E0202 11:11:41.612784 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a321e834-3079-4d1d-b223-08c4ce184d81" containerName="dnsmasq-dns"
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.612791 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="a321e834-3079-4d1d-b223-08c4ce184d81" containerName="dnsmasq-dns"
Feb 02 11:11:41 crc kubenswrapper[4838]: E0202 11:11:41.612810 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c723ca1-2908-4fb1-820a-c440fbf6616c" containerName="mariadb-database-create"
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.612818 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c723ca1-2908-4fb1-820a-c440fbf6616c" containerName="mariadb-database-create"
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.613020 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="a321e834-3079-4d1d-b223-08c4ce184d81" containerName="dnsmasq-dns"
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.613038 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffeae1a4-9be7-4108-a0d7-3ff4f54eb669" containerName="mariadb-account-create-update"
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.613055 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c723ca1-2908-4fb1-820a-c440fbf6616c" containerName="mariadb-database-create"
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.613066 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="1eed330b-d64c-42c0-a440-736df7d0d861" containerName="mariadb-account-create-update"
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.614584 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-snvfg"
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.642889 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data"
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.643115 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-9mftv"
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.644593 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-78llm" podUID="89d377c3-4929-47c4-abc7-53bb5e058025" containerName="ovn-controller" probeResult="failure" output=<
Feb 02 11:11:41 crc kubenswrapper[4838]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Feb 02 11:11:41 crc kubenswrapper[4838]: >
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.652612 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-snvfg"]
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.693068 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-gmk9d"
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.696003 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-gmk9d"
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.726695 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztctw\" (UniqueName: \"kubernetes.io/projected/54de1caa-888f-433a-be5e-87b93932abc2-kube-api-access-ztctw\") pod \"glance-db-sync-snvfg\" (UID: \"54de1caa-888f-433a-be5e-87b93932abc2\") " pod="openstack/glance-db-sync-snvfg"
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.726994 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54de1caa-888f-433a-be5e-87b93932abc2-combined-ca-bundle\") pod \"glance-db-sync-snvfg\" (UID: \"54de1caa-888f-433a-be5e-87b93932abc2\") " pod="openstack/glance-db-sync-snvfg"
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.727110 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/54de1caa-888f-433a-be5e-87b93932abc2-db-sync-config-data\") pod \"glance-db-sync-snvfg\" (UID: \"54de1caa-888f-433a-be5e-87b93932abc2\") " pod="openstack/glance-db-sync-snvfg"
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.727240 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54de1caa-888f-433a-be5e-87b93932abc2-config-data\") pod \"glance-db-sync-snvfg\" (UID: \"54de1caa-888f-433a-be5e-87b93932abc2\") " pod="openstack/glance-db-sync-snvfg"
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.828980 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztctw\" (UniqueName: \"kubernetes.io/projected/54de1caa-888f-433a-be5e-87b93932abc2-kube-api-access-ztctw\") pod \"glance-db-sync-snvfg\" (UID: \"54de1caa-888f-433a-be5e-87b93932abc2\") " pod="openstack/glance-db-sync-snvfg"
Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.829123 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54de1caa-888f-433a-be5e-87b93932abc2-combined-ca-bundle\") pod \"glance-db-sync-snvfg\" (UID: \"54de1caa-888f-433a-be5e-87b93932abc2\") " pod="openstack/glance-db-sync-snvfg"
\"kubernetes.io/secret/54de1caa-888f-433a-be5e-87b93932abc2-combined-ca-bundle\") pod \"glance-db-sync-snvfg\" (UID: \"54de1caa-888f-433a-be5e-87b93932abc2\") " pod="openstack/glance-db-sync-snvfg" Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.829165 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/54de1caa-888f-433a-be5e-87b93932abc2-db-sync-config-data\") pod \"glance-db-sync-snvfg\" (UID: \"54de1caa-888f-433a-be5e-87b93932abc2\") " pod="openstack/glance-db-sync-snvfg" Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.829204 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54de1caa-888f-433a-be5e-87b93932abc2-config-data\") pod \"glance-db-sync-snvfg\" (UID: \"54de1caa-888f-433a-be5e-87b93932abc2\") " pod="openstack/glance-db-sync-snvfg" Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.833952 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54de1caa-888f-433a-be5e-87b93932abc2-combined-ca-bundle\") pod \"glance-db-sync-snvfg\" (UID: \"54de1caa-888f-433a-be5e-87b93932abc2\") " pod="openstack/glance-db-sync-snvfg" Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.834675 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/54de1caa-888f-433a-be5e-87b93932abc2-db-sync-config-data\") pod \"glance-db-sync-snvfg\" (UID: \"54de1caa-888f-433a-be5e-87b93932abc2\") " pod="openstack/glance-db-sync-snvfg" Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.834858 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54de1caa-888f-433a-be5e-87b93932abc2-config-data\") pod \"glance-db-sync-snvfg\" (UID: \"54de1caa-888f-433a-be5e-87b93932abc2\") " pod="openstack/glance-db-sync-snvfg" Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.861108 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztctw\" (UniqueName: \"kubernetes.io/projected/54de1caa-888f-433a-be5e-87b93932abc2-kube-api-access-ztctw\") pod \"glance-db-sync-snvfg\" (UID: \"54de1caa-888f-433a-be5e-87b93932abc2\") " pod="openstack/glance-db-sync-snvfg" Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.930472 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-78llm-config-pshdx"] Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.938867 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-78llm-config-pshdx"] Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.938971 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-78llm-config-pshdx" Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.941351 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Feb 02 11:11:41 crc kubenswrapper[4838]: I0202 11:11:41.958601 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-snvfg" Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.032630 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-scripts\") pod \"ovn-controller-78llm-config-pshdx\" (UID: \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\") " pod="openstack/ovn-controller-78llm-config-pshdx" Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.032918 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-var-log-ovn\") pod \"ovn-controller-78llm-config-pshdx\" (UID: \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\") " pod="openstack/ovn-controller-78llm-config-pshdx" Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.032941 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2r8hz\" (UniqueName: \"kubernetes.io/projected/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-kube-api-access-2r8hz\") pod \"ovn-controller-78llm-config-pshdx\" (UID: \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\") " pod="openstack/ovn-controller-78llm-config-pshdx" Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.032964 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-var-run\") pod \"ovn-controller-78llm-config-pshdx\" (UID: \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\") " pod="openstack/ovn-controller-78llm-config-pshdx" Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.033064 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-var-run-ovn\") pod \"ovn-controller-78llm-config-pshdx\" (UID: \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\") " pod="openstack/ovn-controller-78llm-config-pshdx" Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.033080 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-additional-scripts\") pod \"ovn-controller-78llm-config-pshdx\" (UID: \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\") " pod="openstack/ovn-controller-78llm-config-pshdx" Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.134318 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-var-run-ovn\") pod \"ovn-controller-78llm-config-pshdx\" (UID: \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\") " pod="openstack/ovn-controller-78llm-config-pshdx" Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.134353 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-additional-scripts\") pod \"ovn-controller-78llm-config-pshdx\" (UID: \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\") " pod="openstack/ovn-controller-78llm-config-pshdx" Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.134417 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-scripts\") pod \"ovn-controller-78llm-config-pshdx\" (UID: \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\") " pod="openstack/ovn-controller-78llm-config-pshdx" Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.134443 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-var-log-ovn\") pod \"ovn-controller-78llm-config-pshdx\" (UID: \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\") " pod="openstack/ovn-controller-78llm-config-pshdx" Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.134462 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2r8hz\" (UniqueName: \"kubernetes.io/projected/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-kube-api-access-2r8hz\") pod \"ovn-controller-78llm-config-pshdx\" (UID: \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\") " pod="openstack/ovn-controller-78llm-config-pshdx" Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.134480 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-var-run\") pod \"ovn-controller-78llm-config-pshdx\" (UID: \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\") " pod="openstack/ovn-controller-78llm-config-pshdx" Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.134798 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-var-run\") pod \"ovn-controller-78llm-config-pshdx\" (UID: \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\") " pod="openstack/ovn-controller-78llm-config-pshdx" Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.136098 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-var-run-ovn\") pod \"ovn-controller-78llm-config-pshdx\" (UID: \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\") " pod="openstack/ovn-controller-78llm-config-pshdx" Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.136611 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-var-log-ovn\") pod \"ovn-controller-78llm-config-pshdx\" (UID: \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\") " pod="openstack/ovn-controller-78llm-config-pshdx" Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.137119 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-additional-scripts\") pod \"ovn-controller-78llm-config-pshdx\" (UID: \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\") " pod="openstack/ovn-controller-78llm-config-pshdx" Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.137648 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-scripts\") pod \"ovn-controller-78llm-config-pshdx\" (UID: \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\") " pod="openstack/ovn-controller-78llm-config-pshdx" Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.158391 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2r8hz\" (UniqueName: 
\"kubernetes.io/projected/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-kube-api-access-2r8hz\") pod \"ovn-controller-78llm-config-pshdx\" (UID: \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\") " pod="openstack/ovn-controller-78llm-config-pshdx" Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.276236 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-78llm-config-pshdx" Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.420316 4838 generic.go:334] "Generic (PLEG): container finished" podID="10f55730-6ea0-4989-a006-b0549f5566a7" containerID="70f80cc3e845e9e6389966fa131ac34e28094b7a7010f3ba63b7ea5f71fcac55" exitCode=0 Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.420428 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"10f55730-6ea0-4989-a006-b0549f5566a7","Type":"ContainerDied","Data":"70f80cc3e845e9e6389966fa131ac34e28094b7a7010f3ba63b7ea5f71fcac55"} Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.423636 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"698d5f5d-683c-4130-8d4f-d1d59b5d32e4","Type":"ContainerStarted","Data":"5609c1ee9b0d2c9dc9161b9a6e7b9324e75153305cbea82c771065f24a03a445"} Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.423871 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.425517 4838 generic.go:334] "Generic (PLEG): container finished" podID="582a43e1-d21a-4421-ae28-0eecd147d19e" containerID="f9b38633054e1d3ea7cace5e8e78d194648526d0b6b0f1fbcabe6063d40712e5" exitCode=0 Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.425561 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-524zj" event={"ID":"582a43e1-d21a-4421-ae28-0eecd147d19e","Type":"ContainerDied","Data":"f9b38633054e1d3ea7cace5e8e78d194648526d0b6b0f1fbcabe6063d40712e5"} Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.570343 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=50.041638039 podStartE2EDuration="1m6.570323888s" podCreationTimestamp="2026-02-02 11:10:36 +0000 UTC" firstStartedPulling="2026-02-02 11:10:50.468463542 +0000 UTC m=+1044.805564570" lastFinishedPulling="2026-02-02 11:11:06.997149391 +0000 UTC m=+1061.334250419" observedRunningTime="2026-02-02 11:11:42.511556748 +0000 UTC m=+1096.848657796" watchObservedRunningTime="2026-02-02 11:11:42.570323888 +0000 UTC m=+1096.907424916" Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.593854 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-snvfg"] Feb 02 11:11:42 crc kubenswrapper[4838]: W0202 11:11:42.800899 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84c6aa47_0d53_44e4_841f_8f5fdfd2af0f.slice/crio-b849012ee02ed0dd1e6f3767ba99b0ea8f43de1e221000c00077702d0d24d465 WatchSource:0}: Error finding container b849012ee02ed0dd1e6f3767ba99b0ea8f43de1e221000c00077702d0d24d465: Status 404 returned error can't find the container with id b849012ee02ed0dd1e6f3767ba99b0ea8f43de1e221000c00077702d0d24d465 Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 11:11:42.803363 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-78llm-config-pshdx"] Feb 02 11:11:42 crc kubenswrapper[4838]: I0202 
11:11:42.892720 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.439743 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-snvfg" event={"ID":"54de1caa-888f-433a-be5e-87b93932abc2","Type":"ContainerStarted","Data":"a0042ff1a4615538f1e06137beb5f5fec92474e75ad9d142e41732bee581a2ec"} Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.440941 4838 generic.go:334] "Generic (PLEG): container finished" podID="84c6aa47-0d53-44e4-841f-8f5fdfd2af0f" containerID="032cee7f2352b3814216c51d96d3676f339a080dbc8bc6e165bf6b82886d612e" exitCode=0 Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.440986 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-78llm-config-pshdx" event={"ID":"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f","Type":"ContainerDied","Data":"032cee7f2352b3814216c51d96d3676f339a080dbc8bc6e165bf6b82886d612e"} Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.441041 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-78llm-config-pshdx" event={"ID":"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f","Type":"ContainerStarted","Data":"b849012ee02ed0dd1e6f3767ba99b0ea8f43de1e221000c00077702d0d24d465"} Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.446540 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"10f55730-6ea0-4989-a006-b0549f5566a7","Type":"ContainerStarted","Data":"6a52eb6196a3e58500dc0f8f670fcaaf4bc4f7075c3b6a2c4342f51a72e7519b"} Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.494064 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=52.745939194 podStartE2EDuration="1m7.494039433s" podCreationTimestamp="2026-02-02 11:10:36 +0000 UTC" firstStartedPulling="2026-02-02 11:10:50.462461953 +0000 UTC m=+1044.799562971" lastFinishedPulling="2026-02-02 11:11:05.210562182 +0000 UTC m=+1059.547663210" observedRunningTime="2026-02-02 11:11:43.483714141 +0000 UTC m=+1097.820815209" watchObservedRunningTime="2026-02-02 11:11:43.494039433 +0000 UTC m=+1097.831140471" Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.835230 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-524zj" Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.869887 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/582a43e1-d21a-4421-ae28-0eecd147d19e-combined-ca-bundle\") pod \"582a43e1-d21a-4421-ae28-0eecd147d19e\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.869952 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/582a43e1-d21a-4421-ae28-0eecd147d19e-dispersionconf\") pod \"582a43e1-d21a-4421-ae28-0eecd147d19e\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.870006 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/582a43e1-d21a-4421-ae28-0eecd147d19e-etc-swift\") pod \"582a43e1-d21a-4421-ae28-0eecd147d19e\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.870051 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/582a43e1-d21a-4421-ae28-0eecd147d19e-ring-data-devices\") pod \"582a43e1-d21a-4421-ae28-0eecd147d19e\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.870127 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/582a43e1-d21a-4421-ae28-0eecd147d19e-swiftconf\") pod \"582a43e1-d21a-4421-ae28-0eecd147d19e\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.870254 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-82t2v\" (UniqueName: \"kubernetes.io/projected/582a43e1-d21a-4421-ae28-0eecd147d19e-kube-api-access-82t2v\") pod \"582a43e1-d21a-4421-ae28-0eecd147d19e\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.870302 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/582a43e1-d21a-4421-ae28-0eecd147d19e-scripts\") pod \"582a43e1-d21a-4421-ae28-0eecd147d19e\" (UID: \"582a43e1-d21a-4421-ae28-0eecd147d19e\") " Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.870719 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/582a43e1-d21a-4421-ae28-0eecd147d19e-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "582a43e1-d21a-4421-ae28-0eecd147d19e" (UID: "582a43e1-d21a-4421-ae28-0eecd147d19e"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.870885 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/582a43e1-d21a-4421-ae28-0eecd147d19e-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "582a43e1-d21a-4421-ae28-0eecd147d19e" (UID: "582a43e1-d21a-4421-ae28-0eecd147d19e"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.877940 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/582a43e1-d21a-4421-ae28-0eecd147d19e-kube-api-access-82t2v" (OuterVolumeSpecName: "kube-api-access-82t2v") pod "582a43e1-d21a-4421-ae28-0eecd147d19e" (UID: "582a43e1-d21a-4421-ae28-0eecd147d19e"). InnerVolumeSpecName "kube-api-access-82t2v". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.891595 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/582a43e1-d21a-4421-ae28-0eecd147d19e-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "582a43e1-d21a-4421-ae28-0eecd147d19e" (UID: "582a43e1-d21a-4421-ae28-0eecd147d19e"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.906765 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/582a43e1-d21a-4421-ae28-0eecd147d19e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "582a43e1-d21a-4421-ae28-0eecd147d19e" (UID: "582a43e1-d21a-4421-ae28-0eecd147d19e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.920593 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/582a43e1-d21a-4421-ae28-0eecd147d19e-scripts" (OuterVolumeSpecName: "scripts") pod "582a43e1-d21a-4421-ae28-0eecd147d19e" (UID: "582a43e1-d21a-4421-ae28-0eecd147d19e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.940531 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/582a43e1-d21a-4421-ae28-0eecd147d19e-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "582a43e1-d21a-4421-ae28-0eecd147d19e" (UID: "582a43e1-d21a-4421-ae28-0eecd147d19e"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.972413 4838 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/582a43e1-d21a-4421-ae28-0eecd147d19e-swiftconf\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.972458 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-82t2v\" (UniqueName: \"kubernetes.io/projected/582a43e1-d21a-4421-ae28-0eecd147d19e-kube-api-access-82t2v\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.972474 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/582a43e1-d21a-4421-ae28-0eecd147d19e-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.972485 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/582a43e1-d21a-4421-ae28-0eecd147d19e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.972498 4838 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/582a43e1-d21a-4421-ae28-0eecd147d19e-dispersionconf\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.972507 4838 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/582a43e1-d21a-4421-ae28-0eecd147d19e-etc-swift\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:43 crc kubenswrapper[4838]: I0202 11:11:43.972518 4838 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/582a43e1-d21a-4421-ae28-0eecd147d19e-ring-data-devices\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:44 crc kubenswrapper[4838]: I0202 11:11:44.183484 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-c42pv"] Feb 02 11:11:44 crc kubenswrapper[4838]: I0202 11:11:44.200386 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-c42pv"] Feb 02 11:11:44 crc kubenswrapper[4838]: I0202 11:11:44.455411 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-524zj" event={"ID":"582a43e1-d21a-4421-ae28-0eecd147d19e","Type":"ContainerDied","Data":"eff77afcd85fc5d4cf2546f6f9347f95d9a2d51deba24bf0fc59c0b43bb1e7dd"} Feb 02 11:11:44 crc kubenswrapper[4838]: I0202 11:11:44.455705 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eff77afcd85fc5d4cf2546f6f9347f95d9a2d51deba24bf0fc59c0b43bb1e7dd" Feb 02 11:11:44 crc kubenswrapper[4838]: I0202 11:11:44.455777 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-524zj" Feb 02 11:11:44 crc kubenswrapper[4838]: I0202 11:11:44.517864 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffeae1a4-9be7-4108-a0d7-3ff4f54eb669" path="/var/lib/kubelet/pods/ffeae1a4-9be7-4108-a0d7-3ff4f54eb669/volumes" Feb 02 11:11:44 crc kubenswrapper[4838]: I0202 11:11:44.890641 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-78llm-config-pshdx" Feb 02 11:11:44 crc kubenswrapper[4838]: I0202 11:11:44.989051 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-var-run\") pod \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\" (UID: \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\") " Feb 02 11:11:44 crc kubenswrapper[4838]: I0202 11:11:44.989105 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-var-log-ovn\") pod \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\" (UID: \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\") " Feb 02 11:11:44 crc kubenswrapper[4838]: I0202 11:11:44.989163 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-additional-scripts\") pod \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\" (UID: \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\") " Feb 02 11:11:44 crc kubenswrapper[4838]: I0202 11:11:44.989192 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-scripts\") pod \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\" (UID: \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\") " Feb 02 11:11:44 crc kubenswrapper[4838]: I0202 11:11:44.989238 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-var-run" (OuterVolumeSpecName: "var-run") pod "84c6aa47-0d53-44e4-841f-8f5fdfd2af0f" (UID: "84c6aa47-0d53-44e4-841f-8f5fdfd2af0f"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 11:11:44 crc kubenswrapper[4838]: I0202 11:11:44.989244 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "84c6aa47-0d53-44e4-841f-8f5fdfd2af0f" (UID: "84c6aa47-0d53-44e4-841f-8f5fdfd2af0f"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 11:11:44 crc kubenswrapper[4838]: I0202 11:11:44.989356 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-var-run-ovn\") pod \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\" (UID: \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\") " Feb 02 11:11:44 crc kubenswrapper[4838]: I0202 11:11:44.989403 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2r8hz\" (UniqueName: \"kubernetes.io/projected/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-kube-api-access-2r8hz\") pod \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\" (UID: \"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f\") " Feb 02 11:11:44 crc kubenswrapper[4838]: I0202 11:11:44.989985 4838 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-var-run\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:44 crc kubenswrapper[4838]: I0202 11:11:44.990005 4838 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-var-log-ovn\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:44 crc kubenswrapper[4838]: I0202 11:11:44.990264 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "84c6aa47-0d53-44e4-841f-8f5fdfd2af0f" (UID: "84c6aa47-0d53-44e4-841f-8f5fdfd2af0f"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:11:44 crc kubenswrapper[4838]: I0202 11:11:44.990323 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "84c6aa47-0d53-44e4-841f-8f5fdfd2af0f" (UID: "84c6aa47-0d53-44e4-841f-8f5fdfd2af0f"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 11:11:44 crc kubenswrapper[4838]: I0202 11:11:44.990530 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-scripts" (OuterVolumeSpecName: "scripts") pod "84c6aa47-0d53-44e4-841f-8f5fdfd2af0f" (UID: "84c6aa47-0d53-44e4-841f-8f5fdfd2af0f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:11:45 crc kubenswrapper[4838]: I0202 11:11:45.007712 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-kube-api-access-2r8hz" (OuterVolumeSpecName: "kube-api-access-2r8hz") pod "84c6aa47-0d53-44e4-841f-8f5fdfd2af0f" (UID: "84c6aa47-0d53-44e4-841f-8f5fdfd2af0f"). InnerVolumeSpecName "kube-api-access-2r8hz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:11:45 crc kubenswrapper[4838]: I0202 11:11:45.091265 4838 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-additional-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:45 crc kubenswrapper[4838]: I0202 11:11:45.091307 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:45 crc kubenswrapper[4838]: I0202 11:11:45.091319 4838 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-var-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:45 crc kubenswrapper[4838]: I0202 11:11:45.091330 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2r8hz\" (UniqueName: \"kubernetes.io/projected/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f-kube-api-access-2r8hz\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:45 crc kubenswrapper[4838]: I0202 11:11:45.464824 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-78llm-config-pshdx" event={"ID":"84c6aa47-0d53-44e4-841f-8f5fdfd2af0f","Type":"ContainerDied","Data":"b849012ee02ed0dd1e6f3767ba99b0ea8f43de1e221000c00077702d0d24d465"} Feb 02 11:11:45 crc kubenswrapper[4838]: I0202 11:11:45.464877 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b849012ee02ed0dd1e6f3767ba99b0ea8f43de1e221000c00077702d0d24d465" Feb 02 11:11:45 crc kubenswrapper[4838]: I0202 11:11:45.464927 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-78llm-config-pshdx" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.022121 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-78llm-config-pshdx"] Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.027728 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-78llm-config-pshdx"] Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.118876 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-78llm-config-tqpjs"] Feb 02 11:11:46 crc kubenswrapper[4838]: E0202 11:11:46.119446 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="582a43e1-d21a-4421-ae28-0eecd147d19e" containerName="swift-ring-rebalance" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.119538 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="582a43e1-d21a-4421-ae28-0eecd147d19e" containerName="swift-ring-rebalance" Feb 02 11:11:46 crc kubenswrapper[4838]: E0202 11:11:46.119661 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84c6aa47-0d53-44e4-841f-8f5fdfd2af0f" containerName="ovn-config" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.119746 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="84c6aa47-0d53-44e4-841f-8f5fdfd2af0f" containerName="ovn-config" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.121305 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="84c6aa47-0d53-44e4-841f-8f5fdfd2af0f" containerName="ovn-config" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.121430 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="582a43e1-d21a-4421-ae28-0eecd147d19e" containerName="swift-ring-rebalance" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.122225 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-78llm-config-tqpjs" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.125267 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.133681 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-78llm-config-tqpjs"] Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.208727 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/12f6110a-f054-4b5a-a8b9-16ee2395d922-additional-scripts\") pod \"ovn-controller-78llm-config-tqpjs\" (UID: \"12f6110a-f054-4b5a-a8b9-16ee2395d922\") " pod="openstack/ovn-controller-78llm-config-tqpjs" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.208787 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/12f6110a-f054-4b5a-a8b9-16ee2395d922-var-log-ovn\") pod \"ovn-controller-78llm-config-tqpjs\" (UID: \"12f6110a-f054-4b5a-a8b9-16ee2395d922\") " pod="openstack/ovn-controller-78llm-config-tqpjs" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.208914 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sd4sx\" (UniqueName: \"kubernetes.io/projected/12f6110a-f054-4b5a-a8b9-16ee2395d922-kube-api-access-sd4sx\") pod \"ovn-controller-78llm-config-tqpjs\" (UID: \"12f6110a-f054-4b5a-a8b9-16ee2395d922\") " pod="openstack/ovn-controller-78llm-config-tqpjs" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.209037 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/12f6110a-f054-4b5a-a8b9-16ee2395d922-var-run-ovn\") pod \"ovn-controller-78llm-config-tqpjs\" (UID: \"12f6110a-f054-4b5a-a8b9-16ee2395d922\") " pod="openstack/ovn-controller-78llm-config-tqpjs" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.209175 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/12f6110a-f054-4b5a-a8b9-16ee2395d922-scripts\") pod \"ovn-controller-78llm-config-tqpjs\" (UID: \"12f6110a-f054-4b5a-a8b9-16ee2395d922\") " pod="openstack/ovn-controller-78llm-config-tqpjs" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.209210 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/12f6110a-f054-4b5a-a8b9-16ee2395d922-var-run\") pod \"ovn-controller-78llm-config-tqpjs\" (UID: \"12f6110a-f054-4b5a-a8b9-16ee2395d922\") " pod="openstack/ovn-controller-78llm-config-tqpjs" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.310659 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/12f6110a-f054-4b5a-a8b9-16ee2395d922-var-log-ovn\") pod \"ovn-controller-78llm-config-tqpjs\" (UID: \"12f6110a-f054-4b5a-a8b9-16ee2395d922\") " pod="openstack/ovn-controller-78llm-config-tqpjs" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.310725 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sd4sx\" (UniqueName: 
\"kubernetes.io/projected/12f6110a-f054-4b5a-a8b9-16ee2395d922-kube-api-access-sd4sx\") pod \"ovn-controller-78llm-config-tqpjs\" (UID: \"12f6110a-f054-4b5a-a8b9-16ee2395d922\") " pod="openstack/ovn-controller-78llm-config-tqpjs" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.310767 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/12f6110a-f054-4b5a-a8b9-16ee2395d922-var-run-ovn\") pod \"ovn-controller-78llm-config-tqpjs\" (UID: \"12f6110a-f054-4b5a-a8b9-16ee2395d922\") " pod="openstack/ovn-controller-78llm-config-tqpjs" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.310812 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/12f6110a-f054-4b5a-a8b9-16ee2395d922-scripts\") pod \"ovn-controller-78llm-config-tqpjs\" (UID: \"12f6110a-f054-4b5a-a8b9-16ee2395d922\") " pod="openstack/ovn-controller-78llm-config-tqpjs" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.310832 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/12f6110a-f054-4b5a-a8b9-16ee2395d922-var-run\") pod \"ovn-controller-78llm-config-tqpjs\" (UID: \"12f6110a-f054-4b5a-a8b9-16ee2395d922\") " pod="openstack/ovn-controller-78llm-config-tqpjs" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.310877 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/12f6110a-f054-4b5a-a8b9-16ee2395d922-additional-scripts\") pod \"ovn-controller-78llm-config-tqpjs\" (UID: \"12f6110a-f054-4b5a-a8b9-16ee2395d922\") " pod="openstack/ovn-controller-78llm-config-tqpjs" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.311110 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/12f6110a-f054-4b5a-a8b9-16ee2395d922-var-run-ovn\") pod \"ovn-controller-78llm-config-tqpjs\" (UID: \"12f6110a-f054-4b5a-a8b9-16ee2395d922\") " pod="openstack/ovn-controller-78llm-config-tqpjs" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.311115 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/12f6110a-f054-4b5a-a8b9-16ee2395d922-var-log-ovn\") pod \"ovn-controller-78llm-config-tqpjs\" (UID: \"12f6110a-f054-4b5a-a8b9-16ee2395d922\") " pod="openstack/ovn-controller-78llm-config-tqpjs" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.311334 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/12f6110a-f054-4b5a-a8b9-16ee2395d922-var-run\") pod \"ovn-controller-78llm-config-tqpjs\" (UID: \"12f6110a-f054-4b5a-a8b9-16ee2395d922\") " pod="openstack/ovn-controller-78llm-config-tqpjs" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.311538 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/12f6110a-f054-4b5a-a8b9-16ee2395d922-additional-scripts\") pod \"ovn-controller-78llm-config-tqpjs\" (UID: \"12f6110a-f054-4b5a-a8b9-16ee2395d922\") " pod="openstack/ovn-controller-78llm-config-tqpjs" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.312919 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/12f6110a-f054-4b5a-a8b9-16ee2395d922-scripts\") pod \"ovn-controller-78llm-config-tqpjs\" (UID: \"12f6110a-f054-4b5a-a8b9-16ee2395d922\") " pod="openstack/ovn-controller-78llm-config-tqpjs" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.329255 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sd4sx\" (UniqueName: \"kubernetes.io/projected/12f6110a-f054-4b5a-a8b9-16ee2395d922-kube-api-access-sd4sx\") pod \"ovn-controller-78llm-config-tqpjs\" (UID: \"12f6110a-f054-4b5a-a8b9-16ee2395d922\") " pod="openstack/ovn-controller-78llm-config-tqpjs" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.438862 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-78llm-config-tqpjs" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.446908 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-78llm" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.560188 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84c6aa47-0d53-44e4-841f-8f5fdfd2af0f" path="/var/lib/kubelet/pods/84c6aa47-0d53-44e4-841f-8f5fdfd2af0f/volumes" Feb 02 11:11:46 crc kubenswrapper[4838]: I0202 11:11:46.567403 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Feb 02 11:11:47 crc kubenswrapper[4838]: I0202 11:11:47.017106 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-78llm-config-tqpjs"] Feb 02 11:11:47 crc kubenswrapper[4838]: I0202 11:11:47.492118 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-78llm-config-tqpjs" event={"ID":"12f6110a-f054-4b5a-a8b9-16ee2395d922","Type":"ContainerStarted","Data":"fca4963b9b40891c5fe798ef2c5fac1b8bc291516f36c0418b0a8d6d17c4014a"} Feb 02 11:11:47 crc kubenswrapper[4838]: I0202 11:11:47.492408 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-78llm-config-tqpjs" event={"ID":"12f6110a-f054-4b5a-a8b9-16ee2395d922","Type":"ContainerStarted","Data":"d9d10df38f34ffc0de486e0050a3782afc8861543d5c6c76eb6fd40bbf7ebae9"} Feb 02 11:11:47 crc kubenswrapper[4838]: I0202 11:11:47.545909 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-78llm-config-tqpjs" podStartSLOduration=1.545885043 podStartE2EDuration="1.545885043s" podCreationTimestamp="2026-02-02 11:11:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:11:47.534866202 +0000 UTC m=+1101.871967220" watchObservedRunningTime="2026-02-02 11:11:47.545885043 +0000 UTC m=+1101.882986081" Feb 02 11:11:47 crc kubenswrapper[4838]: I0202 11:11:47.771299 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:11:48 crc kubenswrapper[4838]: I0202 11:11:48.501512 4838 generic.go:334] "Generic (PLEG): container finished" podID="12f6110a-f054-4b5a-a8b9-16ee2395d922" containerID="fca4963b9b40891c5fe798ef2c5fac1b8bc291516f36c0418b0a8d6d17c4014a" exitCode=0 Feb 02 11:11:48 crc kubenswrapper[4838]: I0202 11:11:48.501554 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-78llm-config-tqpjs" event={"ID":"12f6110a-f054-4b5a-a8b9-16ee2395d922","Type":"ContainerDied","Data":"fca4963b9b40891c5fe798ef2c5fac1b8bc291516f36c0418b0a8d6d17c4014a"} Feb 02 11:11:49 crc 
kubenswrapper[4838]: I0202 11:11:49.192391 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-9fh77"] Feb 02 11:11:49 crc kubenswrapper[4838]: I0202 11:11:49.193361 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-9fh77" Feb 02 11:11:49 crc kubenswrapper[4838]: I0202 11:11:49.195223 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret" Feb 02 11:11:49 crc kubenswrapper[4838]: I0202 11:11:49.203446 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-9fh77"] Feb 02 11:11:49 crc kubenswrapper[4838]: I0202 11:11:49.260298 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2-operator-scripts\") pod \"root-account-create-update-9fh77\" (UID: \"145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2\") " pod="openstack/root-account-create-update-9fh77" Feb 02 11:11:49 crc kubenswrapper[4838]: I0202 11:11:49.260469 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ft6pd\" (UniqueName: \"kubernetes.io/projected/145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2-kube-api-access-ft6pd\") pod \"root-account-create-update-9fh77\" (UID: \"145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2\") " pod="openstack/root-account-create-update-9fh77" Feb 02 11:11:49 crc kubenswrapper[4838]: I0202 11:11:49.361647 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ft6pd\" (UniqueName: \"kubernetes.io/projected/145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2-kube-api-access-ft6pd\") pod \"root-account-create-update-9fh77\" (UID: \"145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2\") " pod="openstack/root-account-create-update-9fh77" Feb 02 11:11:49 crc kubenswrapper[4838]: I0202 11:11:49.361928 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2-operator-scripts\") pod \"root-account-create-update-9fh77\" (UID: \"145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2\") " pod="openstack/root-account-create-update-9fh77" Feb 02 11:11:49 crc kubenswrapper[4838]: I0202 11:11:49.362651 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2-operator-scripts\") pod \"root-account-create-update-9fh77\" (UID: \"145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2\") " pod="openstack/root-account-create-update-9fh77" Feb 02 11:11:49 crc kubenswrapper[4838]: I0202 11:11:49.381275 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ft6pd\" (UniqueName: \"kubernetes.io/projected/145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2-kube-api-access-ft6pd\") pod \"root-account-create-update-9fh77\" (UID: \"145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2\") " pod="openstack/root-account-create-update-9fh77" Feb 02 11:11:49 crc kubenswrapper[4838]: I0202 11:11:49.560073 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-9fh77" Feb 02 11:11:55 crc kubenswrapper[4838]: I0202 11:11:55.884985 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-78llm-config-tqpjs" Feb 02 11:11:55 crc kubenswrapper[4838]: I0202 11:11:55.984324 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/12f6110a-f054-4b5a-a8b9-16ee2395d922-scripts\") pod \"12f6110a-f054-4b5a-a8b9-16ee2395d922\" (UID: \"12f6110a-f054-4b5a-a8b9-16ee2395d922\") " Feb 02 11:11:55 crc kubenswrapper[4838]: I0202 11:11:55.984474 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/12f6110a-f054-4b5a-a8b9-16ee2395d922-var-run\") pod \"12f6110a-f054-4b5a-a8b9-16ee2395d922\" (UID: \"12f6110a-f054-4b5a-a8b9-16ee2395d922\") " Feb 02 11:11:55 crc kubenswrapper[4838]: I0202 11:11:55.984514 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/12f6110a-f054-4b5a-a8b9-16ee2395d922-additional-scripts\") pod \"12f6110a-f054-4b5a-a8b9-16ee2395d922\" (UID: \"12f6110a-f054-4b5a-a8b9-16ee2395d922\") " Feb 02 11:11:55 crc kubenswrapper[4838]: I0202 11:11:55.984602 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/12f6110a-f054-4b5a-a8b9-16ee2395d922-var-run" (OuterVolumeSpecName: "var-run") pod "12f6110a-f054-4b5a-a8b9-16ee2395d922" (UID: "12f6110a-f054-4b5a-a8b9-16ee2395d922"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 11:11:55 crc kubenswrapper[4838]: I0202 11:11:55.984669 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/12f6110a-f054-4b5a-a8b9-16ee2395d922-var-run-ovn\") pod \"12f6110a-f054-4b5a-a8b9-16ee2395d922\" (UID: \"12f6110a-f054-4b5a-a8b9-16ee2395d922\") " Feb 02 11:11:55 crc kubenswrapper[4838]: I0202 11:11:55.984718 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sd4sx\" (UniqueName: \"kubernetes.io/projected/12f6110a-f054-4b5a-a8b9-16ee2395d922-kube-api-access-sd4sx\") pod \"12f6110a-f054-4b5a-a8b9-16ee2395d922\" (UID: \"12f6110a-f054-4b5a-a8b9-16ee2395d922\") " Feb 02 11:11:55 crc kubenswrapper[4838]: I0202 11:11:55.984756 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/12f6110a-f054-4b5a-a8b9-16ee2395d922-var-log-ovn\") pod \"12f6110a-f054-4b5a-a8b9-16ee2395d922\" (UID: \"12f6110a-f054-4b5a-a8b9-16ee2395d922\") " Feb 02 11:11:55 crc kubenswrapper[4838]: I0202 11:11:55.984915 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/12f6110a-f054-4b5a-a8b9-16ee2395d922-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "12f6110a-f054-4b5a-a8b9-16ee2395d922" (UID: "12f6110a-f054-4b5a-a8b9-16ee2395d922"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 11:11:55 crc kubenswrapper[4838]: I0202 11:11:55.984998 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/12f6110a-f054-4b5a-a8b9-16ee2395d922-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "12f6110a-f054-4b5a-a8b9-16ee2395d922" (UID: "12f6110a-f054-4b5a-a8b9-16ee2395d922"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 11:11:55 crc kubenswrapper[4838]: I0202 11:11:55.985253 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12f6110a-f054-4b5a-a8b9-16ee2395d922-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "12f6110a-f054-4b5a-a8b9-16ee2395d922" (UID: "12f6110a-f054-4b5a-a8b9-16ee2395d922"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:11:55 crc kubenswrapper[4838]: I0202 11:11:55.985525 4838 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/12f6110a-f054-4b5a-a8b9-16ee2395d922-var-run\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:55 crc kubenswrapper[4838]: I0202 11:11:55.985541 4838 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/12f6110a-f054-4b5a-a8b9-16ee2395d922-additional-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:55 crc kubenswrapper[4838]: I0202 11:11:55.985555 4838 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/12f6110a-f054-4b5a-a8b9-16ee2395d922-var-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:55 crc kubenswrapper[4838]: I0202 11:11:55.985565 4838 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/12f6110a-f054-4b5a-a8b9-16ee2395d922-var-log-ovn\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:55 crc kubenswrapper[4838]: I0202 11:11:55.985792 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12f6110a-f054-4b5a-a8b9-16ee2395d922-scripts" (OuterVolumeSpecName: "scripts") pod "12f6110a-f054-4b5a-a8b9-16ee2395d922" (UID: "12f6110a-f054-4b5a-a8b9-16ee2395d922"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:11:55 crc kubenswrapper[4838]: I0202 11:11:55.996189 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12f6110a-f054-4b5a-a8b9-16ee2395d922-kube-api-access-sd4sx" (OuterVolumeSpecName: "kube-api-access-sd4sx") pod "12f6110a-f054-4b5a-a8b9-16ee2395d922" (UID: "12f6110a-f054-4b5a-a8b9-16ee2395d922"). InnerVolumeSpecName "kube-api-access-sd4sx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:11:56 crc kubenswrapper[4838]: I0202 11:11:56.087416 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/12e53950-9847-46b2-a51a-1fac5b690098-etc-swift\") pod \"swift-storage-0\" (UID: \"12e53950-9847-46b2-a51a-1fac5b690098\") " pod="openstack/swift-storage-0" Feb 02 11:11:56 crc kubenswrapper[4838]: I0202 11:11:56.087532 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sd4sx\" (UniqueName: \"kubernetes.io/projected/12f6110a-f054-4b5a-a8b9-16ee2395d922-kube-api-access-sd4sx\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:56 crc kubenswrapper[4838]: I0202 11:11:56.087550 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/12f6110a-f054-4b5a-a8b9-16ee2395d922-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:11:56 crc kubenswrapper[4838]: I0202 11:11:56.093805 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/12e53950-9847-46b2-a51a-1fac5b690098-etc-swift\") pod \"swift-storage-0\" (UID: \"12e53950-9847-46b2-a51a-1fac5b690098\") " pod="openstack/swift-storage-0" Feb 02 11:11:56 crc kubenswrapper[4838]: I0202 11:11:56.206663 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Feb 02 11:11:56 crc kubenswrapper[4838]: I0202 11:11:56.304154 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-9fh77"] Feb 02 11:11:56 crc kubenswrapper[4838]: W0202 11:11:56.317660 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod145b0b8b_9fcb_4ed3_b4e1_6d6d06b3ebf2.slice/crio-c1f47d75afc51028bdba7f937241b48410ab00ee6c706b175cb10e667d48d281 WatchSource:0}: Error finding container c1f47d75afc51028bdba7f937241b48410ab00ee6c706b175cb10e667d48d281: Status 404 returned error can't find the container with id c1f47d75afc51028bdba7f937241b48410ab00ee6c706b175cb10e667d48d281 Feb 02 11:11:56 crc kubenswrapper[4838]: I0202 11:11:56.588735 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-78llm-config-tqpjs" event={"ID":"12f6110a-f054-4b5a-a8b9-16ee2395d922","Type":"ContainerDied","Data":"d9d10df38f34ffc0de486e0050a3782afc8861543d5c6c76eb6fd40bbf7ebae9"} Feb 02 11:11:56 crc kubenswrapper[4838]: I0202 11:11:56.589004 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d9d10df38f34ffc0de486e0050a3782afc8861543d5c6c76eb6fd40bbf7ebae9" Feb 02 11:11:56 crc kubenswrapper[4838]: I0202 11:11:56.588794 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-78llm-config-tqpjs" Feb 02 11:11:56 crc kubenswrapper[4838]: I0202 11:11:56.741589 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Feb 02 11:11:56 crc kubenswrapper[4838]: I0202 11:11:56.961953 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-78llm-config-tqpjs"] Feb 02 11:11:56 crc kubenswrapper[4838]: I0202 11:11:56.969313 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-78llm-config-tqpjs"] Feb 02 11:11:57 crc kubenswrapper[4838]: I0202 11:11:57.447060 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Feb 02 11:11:57 crc kubenswrapper[4838]: I0202 11:11:57.598095 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-9fh77" event={"ID":"145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2","Type":"ContainerStarted","Data":"c1f47d75afc51028bdba7f937241b48410ab00ee6c706b175cb10e667d48d281"} Feb 02 11:11:57 crc kubenswrapper[4838]: I0202 11:11:57.599320 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"12e53950-9847-46b2-a51a-1fac5b690098","Type":"ContainerStarted","Data":"d178b7b545fdb692d41e4d34ae1c5d1c4ad155a7d2203622bd81b0eb7b43d39c"} Feb 02 11:11:57 crc kubenswrapper[4838]: I0202 11:11:57.750451 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-6wr5s"] Feb 02 11:11:57 crc kubenswrapper[4838]: E0202 11:11:57.750896 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12f6110a-f054-4b5a-a8b9-16ee2395d922" containerName="ovn-config" Feb 02 11:11:57 crc kubenswrapper[4838]: I0202 11:11:57.750918 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="12f6110a-f054-4b5a-a8b9-16ee2395d922" containerName="ovn-config" Feb 02 11:11:57 crc kubenswrapper[4838]: I0202 11:11:57.751105 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="12f6110a-f054-4b5a-a8b9-16ee2395d922" containerName="ovn-config" Feb 02 11:11:57 crc kubenswrapper[4838]: I0202 11:11:57.751681 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-6wr5s" Feb 02 11:11:57 crc kubenswrapper[4838]: I0202 11:11:57.765709 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-6wr5s"] Feb 02 11:11:57 crc kubenswrapper[4838]: I0202 11:11:57.776228 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:11:57 crc kubenswrapper[4838]: I0202 11:11:57.844729 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-4bd92"] Feb 02 11:11:57 crc kubenswrapper[4838]: I0202 11:11:57.851503 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-4bd92" Feb 02 11:11:57 crc kubenswrapper[4838]: I0202 11:11:57.874469 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-4bd92"] Feb 02 11:11:57 crc kubenswrapper[4838]: I0202 11:11:57.923123 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpd8r\" (UniqueName: \"kubernetes.io/projected/e57ac670-eea3-4857-8990-872ce1dba0e4-kube-api-access-kpd8r\") pod \"cinder-db-create-6wr5s\" (UID: \"e57ac670-eea3-4857-8990-872ce1dba0e4\") " pod="openstack/cinder-db-create-6wr5s" Feb 02 11:11:57 crc kubenswrapper[4838]: I0202 11:11:57.923402 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e57ac670-eea3-4857-8990-872ce1dba0e4-operator-scripts\") pod \"cinder-db-create-6wr5s\" (UID: \"e57ac670-eea3-4857-8990-872ce1dba0e4\") " pod="openstack/cinder-db-create-6wr5s" Feb 02 11:11:57 crc kubenswrapper[4838]: I0202 11:11:57.956653 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-9eef-account-create-update-ldszm"] Feb 02 11:11:57 crc kubenswrapper[4838]: I0202 11:11:57.958030 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-9eef-account-create-update-ldszm" Feb 02 11:11:57 crc kubenswrapper[4838]: I0202 11:11:57.962489 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Feb 02 11:11:57 crc kubenswrapper[4838]: I0202 11:11:57.968515 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-9eef-account-create-update-ldszm"] Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.025117 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpd8r\" (UniqueName: \"kubernetes.io/projected/e57ac670-eea3-4857-8990-872ce1dba0e4-kube-api-access-kpd8r\") pod \"cinder-db-create-6wr5s\" (UID: \"e57ac670-eea3-4857-8990-872ce1dba0e4\") " pod="openstack/cinder-db-create-6wr5s" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.025179 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e57ac670-eea3-4857-8990-872ce1dba0e4-operator-scripts\") pod \"cinder-db-create-6wr5s\" (UID: \"e57ac670-eea3-4857-8990-872ce1dba0e4\") " pod="openstack/cinder-db-create-6wr5s" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.025234 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqp4b\" (UniqueName: \"kubernetes.io/projected/6fdc9ecb-a604-4618-89a6-fd5a2237f10c-kube-api-access-rqp4b\") pod \"barbican-db-create-4bd92\" (UID: \"6fdc9ecb-a604-4618-89a6-fd5a2237f10c\") " pod="openstack/barbican-db-create-4bd92" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.025300 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6fdc9ecb-a604-4618-89a6-fd5a2237f10c-operator-scripts\") pod \"barbican-db-create-4bd92\" (UID: \"6fdc9ecb-a604-4618-89a6-fd5a2237f10c\") " pod="openstack/barbican-db-create-4bd92" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.026221 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/e57ac670-eea3-4857-8990-872ce1dba0e4-operator-scripts\") pod \"cinder-db-create-6wr5s\" (UID: \"e57ac670-eea3-4857-8990-872ce1dba0e4\") " pod="openstack/cinder-db-create-6wr5s" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.037232 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-8fe7-account-create-update-zd7vb"] Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.038405 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8fe7-account-create-update-zd7vb" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.044136 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.055505 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8fe7-account-create-update-zd7vb"] Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.065813 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpd8r\" (UniqueName: \"kubernetes.io/projected/e57ac670-eea3-4857-8990-872ce1dba0e4-kube-api-access-kpd8r\") pod \"cinder-db-create-6wr5s\" (UID: \"e57ac670-eea3-4857-8990-872ce1dba0e4\") " pod="openstack/cinder-db-create-6wr5s" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.074018 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-6wr5s" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.126798 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqp4b\" (UniqueName: \"kubernetes.io/projected/6fdc9ecb-a604-4618-89a6-fd5a2237f10c-kube-api-access-rqp4b\") pod \"barbican-db-create-4bd92\" (UID: \"6fdc9ecb-a604-4618-89a6-fd5a2237f10c\") " pod="openstack/barbican-db-create-4bd92" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.126905 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6fdc9ecb-a604-4618-89a6-fd5a2237f10c-operator-scripts\") pod \"barbican-db-create-4bd92\" (UID: \"6fdc9ecb-a604-4618-89a6-fd5a2237f10c\") " pod="openstack/barbican-db-create-4bd92" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.126946 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c8e20559-4e85-40ad-a733-41497a1772d1-operator-scripts\") pod \"cinder-9eef-account-create-update-ldszm\" (UID: \"c8e20559-4e85-40ad-a733-41497a1772d1\") " pod="openstack/cinder-9eef-account-create-update-ldszm" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.126978 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d17b5d07-41bc-4ab5-b14d-c36ed19c5098-operator-scripts\") pod \"barbican-8fe7-account-create-update-zd7vb\" (UID: \"d17b5d07-41bc-4ab5-b14d-c36ed19c5098\") " pod="openstack/barbican-8fe7-account-create-update-zd7vb" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.127046 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9sc2h\" (UniqueName: \"kubernetes.io/projected/c8e20559-4e85-40ad-a733-41497a1772d1-kube-api-access-9sc2h\") pod \"cinder-9eef-account-create-update-ldszm\" (UID: \"c8e20559-4e85-40ad-a733-41497a1772d1\") " 
pod="openstack/cinder-9eef-account-create-update-ldszm" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.127102 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8vkf\" (UniqueName: \"kubernetes.io/projected/d17b5d07-41bc-4ab5-b14d-c36ed19c5098-kube-api-access-n8vkf\") pod \"barbican-8fe7-account-create-update-zd7vb\" (UID: \"d17b5d07-41bc-4ab5-b14d-c36ed19c5098\") " pod="openstack/barbican-8fe7-account-create-update-zd7vb" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.128210 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6fdc9ecb-a604-4618-89a6-fd5a2237f10c-operator-scripts\") pod \"barbican-db-create-4bd92\" (UID: \"6fdc9ecb-a604-4618-89a6-fd5a2237f10c\") " pod="openstack/barbican-db-create-4bd92" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.138592 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-cckg8"] Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.140142 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-cckg8" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.142425 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.142692 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.142891 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-9mbr8" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.143216 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.153125 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqp4b\" (UniqueName: \"kubernetes.io/projected/6fdc9ecb-a604-4618-89a6-fd5a2237f10c-kube-api-access-rqp4b\") pod \"barbican-db-create-4bd92\" (UID: \"6fdc9ecb-a604-4618-89a6-fd5a2237f10c\") " pod="openstack/barbican-db-create-4bd92" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.156071 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-cckg8"] Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.186427 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-4bd92" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.228488 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cea963bb-084e-4ba7-98b6-342e509be13f-combined-ca-bundle\") pod \"keystone-db-sync-cckg8\" (UID: \"cea963bb-084e-4ba7-98b6-342e509be13f\") " pod="openstack/keystone-db-sync-cckg8" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.228553 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c8e20559-4e85-40ad-a733-41497a1772d1-operator-scripts\") pod \"cinder-9eef-account-create-update-ldszm\" (UID: \"c8e20559-4e85-40ad-a733-41497a1772d1\") " pod="openstack/cinder-9eef-account-create-update-ldszm" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.228586 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d17b5d07-41bc-4ab5-b14d-c36ed19c5098-operator-scripts\") pod \"barbican-8fe7-account-create-update-zd7vb\" (UID: \"d17b5d07-41bc-4ab5-b14d-c36ed19c5098\") " pod="openstack/barbican-8fe7-account-create-update-zd7vb" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.228638 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cea963bb-084e-4ba7-98b6-342e509be13f-config-data\") pod \"keystone-db-sync-cckg8\" (UID: \"cea963bb-084e-4ba7-98b6-342e509be13f\") " pod="openstack/keystone-db-sync-cckg8" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.228699 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9sc2h\" (UniqueName: \"kubernetes.io/projected/c8e20559-4e85-40ad-a733-41497a1772d1-kube-api-access-9sc2h\") pod \"cinder-9eef-account-create-update-ldszm\" (UID: \"c8e20559-4e85-40ad-a733-41497a1772d1\") " pod="openstack/cinder-9eef-account-create-update-ldszm" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.228751 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8vkf\" (UniqueName: \"kubernetes.io/projected/d17b5d07-41bc-4ab5-b14d-c36ed19c5098-kube-api-access-n8vkf\") pod \"barbican-8fe7-account-create-update-zd7vb\" (UID: \"d17b5d07-41bc-4ab5-b14d-c36ed19c5098\") " pod="openstack/barbican-8fe7-account-create-update-zd7vb" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.228796 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjklf\" (UniqueName: \"kubernetes.io/projected/cea963bb-084e-4ba7-98b6-342e509be13f-kube-api-access-qjklf\") pod \"keystone-db-sync-cckg8\" (UID: \"cea963bb-084e-4ba7-98b6-342e509be13f\") " pod="openstack/keystone-db-sync-cckg8" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.229726 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c8e20559-4e85-40ad-a733-41497a1772d1-operator-scripts\") pod \"cinder-9eef-account-create-update-ldszm\" (UID: \"c8e20559-4e85-40ad-a733-41497a1772d1\") " pod="openstack/cinder-9eef-account-create-update-ldszm" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.230316 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/d17b5d07-41bc-4ab5-b14d-c36ed19c5098-operator-scripts\") pod \"barbican-8fe7-account-create-update-zd7vb\" (UID: \"d17b5d07-41bc-4ab5-b14d-c36ed19c5098\") " pod="openstack/barbican-8fe7-account-create-update-zd7vb" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.269056 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-ckfdv"] Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.270293 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-ckfdv" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.294867 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-ckfdv"] Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.330524 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cea963bb-084e-4ba7-98b6-342e509be13f-config-data\") pod \"keystone-db-sync-cckg8\" (UID: \"cea963bb-084e-4ba7-98b6-342e509be13f\") " pod="openstack/keystone-db-sync-cckg8" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.330760 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjklf\" (UniqueName: \"kubernetes.io/projected/cea963bb-084e-4ba7-98b6-342e509be13f-kube-api-access-qjklf\") pod \"keystone-db-sync-cckg8\" (UID: \"cea963bb-084e-4ba7-98b6-342e509be13f\") " pod="openstack/keystone-db-sync-cckg8" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.330816 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cea963bb-084e-4ba7-98b6-342e509be13f-combined-ca-bundle\") pod \"keystone-db-sync-cckg8\" (UID: \"cea963bb-084e-4ba7-98b6-342e509be13f\") " pod="openstack/keystone-db-sync-cckg8" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.363641 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8vkf\" (UniqueName: \"kubernetes.io/projected/d17b5d07-41bc-4ab5-b14d-c36ed19c5098-kube-api-access-n8vkf\") pod \"barbican-8fe7-account-create-update-zd7vb\" (UID: \"d17b5d07-41bc-4ab5-b14d-c36ed19c5098\") " pod="openstack/barbican-8fe7-account-create-update-zd7vb" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.367203 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9sc2h\" (UniqueName: \"kubernetes.io/projected/c8e20559-4e85-40ad-a733-41497a1772d1-kube-api-access-9sc2h\") pod \"cinder-9eef-account-create-update-ldszm\" (UID: \"c8e20559-4e85-40ad-a733-41497a1772d1\") " pod="openstack/cinder-9eef-account-create-update-ldszm" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.368164 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-8fe7-account-create-update-zd7vb" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.368379 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cea963bb-084e-4ba7-98b6-342e509be13f-config-data\") pod \"keystone-db-sync-cckg8\" (UID: \"cea963bb-084e-4ba7-98b6-342e509be13f\") " pod="openstack/keystone-db-sync-cckg8" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.369130 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjklf\" (UniqueName: \"kubernetes.io/projected/cea963bb-084e-4ba7-98b6-342e509be13f-kube-api-access-qjklf\") pod \"keystone-db-sync-cckg8\" (UID: \"cea963bb-084e-4ba7-98b6-342e509be13f\") " pod="openstack/keystone-db-sync-cckg8" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.369213 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cea963bb-084e-4ba7-98b6-342e509be13f-combined-ca-bundle\") pod \"keystone-db-sync-cckg8\" (UID: \"cea963bb-084e-4ba7-98b6-342e509be13f\") " pod="openstack/keystone-db-sync-cckg8" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.432794 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69673190-e2b6-4bdc-b00c-0cef9815317b-operator-scripts\") pod \"neutron-db-create-ckfdv\" (UID: \"69673190-e2b6-4bdc-b00c-0cef9815317b\") " pod="openstack/neutron-db-create-ckfdv" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.433188 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wz8wn\" (UniqueName: \"kubernetes.io/projected/69673190-e2b6-4bdc-b00c-0cef9815317b-kube-api-access-wz8wn\") pod \"neutron-db-create-ckfdv\" (UID: \"69673190-e2b6-4bdc-b00c-0cef9815317b\") " pod="openstack/neutron-db-create-ckfdv" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.447534 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-d3b7-account-create-update-lkzdb"] Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.448694 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-d3b7-account-create-update-lkzdb" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.461432 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.473492 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-d3b7-account-create-update-lkzdb"] Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.534033 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69673190-e2b6-4bdc-b00c-0cef9815317b-operator-scripts\") pod \"neutron-db-create-ckfdv\" (UID: \"69673190-e2b6-4bdc-b00c-0cef9815317b\") " pod="openstack/neutron-db-create-ckfdv" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.534078 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9t96m\" (UniqueName: \"kubernetes.io/projected/f4277c56-4293-4fd6-90e0-ddca7529ef0e-kube-api-access-9t96m\") pod \"neutron-d3b7-account-create-update-lkzdb\" (UID: \"f4277c56-4293-4fd6-90e0-ddca7529ef0e\") " pod="openstack/neutron-d3b7-account-create-update-lkzdb" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.534132 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wz8wn\" (UniqueName: \"kubernetes.io/projected/69673190-e2b6-4bdc-b00c-0cef9815317b-kube-api-access-wz8wn\") pod \"neutron-db-create-ckfdv\" (UID: \"69673190-e2b6-4bdc-b00c-0cef9815317b\") " pod="openstack/neutron-db-create-ckfdv" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.534190 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4277c56-4293-4fd6-90e0-ddca7529ef0e-operator-scripts\") pod \"neutron-d3b7-account-create-update-lkzdb\" (UID: \"f4277c56-4293-4fd6-90e0-ddca7529ef0e\") " pod="openstack/neutron-d3b7-account-create-update-lkzdb" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.534343 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-cckg8" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.537117 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12f6110a-f054-4b5a-a8b9-16ee2395d922" path="/var/lib/kubelet/pods/12f6110a-f054-4b5a-a8b9-16ee2395d922/volumes" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.537447 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69673190-e2b6-4bdc-b00c-0cef9815317b-operator-scripts\") pod \"neutron-db-create-ckfdv\" (UID: \"69673190-e2b6-4bdc-b00c-0cef9815317b\") " pod="openstack/neutron-db-create-ckfdv" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.558028 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wz8wn\" (UniqueName: \"kubernetes.io/projected/69673190-e2b6-4bdc-b00c-0cef9815317b-kube-api-access-wz8wn\") pod \"neutron-db-create-ckfdv\" (UID: \"69673190-e2b6-4bdc-b00c-0cef9815317b\") " pod="openstack/neutron-db-create-ckfdv" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.582267 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-9eef-account-create-update-ldszm" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.631071 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-6wr5s"] Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.635834 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4277c56-4293-4fd6-90e0-ddca7529ef0e-operator-scripts\") pod \"neutron-d3b7-account-create-update-lkzdb\" (UID: \"f4277c56-4293-4fd6-90e0-ddca7529ef0e\") " pod="openstack/neutron-d3b7-account-create-update-lkzdb" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.636001 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9t96m\" (UniqueName: \"kubernetes.io/projected/f4277c56-4293-4fd6-90e0-ddca7529ef0e-kube-api-access-9t96m\") pod \"neutron-d3b7-account-create-update-lkzdb\" (UID: \"f4277c56-4293-4fd6-90e0-ddca7529ef0e\") " pod="openstack/neutron-d3b7-account-create-update-lkzdb" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.638173 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4277c56-4293-4fd6-90e0-ddca7529ef0e-operator-scripts\") pod \"neutron-d3b7-account-create-update-lkzdb\" (UID: \"f4277c56-4293-4fd6-90e0-ddca7529ef0e\") " pod="openstack/neutron-d3b7-account-create-update-lkzdb" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.654021 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-9fh77" event={"ID":"145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2","Type":"ContainerStarted","Data":"0fbd794241f035aa14cb346f7df5823e22e87535dbf87ceb79e0b384590e4ce0"} Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.656251 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9t96m\" (UniqueName: \"kubernetes.io/projected/f4277c56-4293-4fd6-90e0-ddca7529ef0e-kube-api-access-9t96m\") pod \"neutron-d3b7-account-create-update-lkzdb\" (UID: \"f4277c56-4293-4fd6-90e0-ddca7529ef0e\") " pod="openstack/neutron-d3b7-account-create-update-lkzdb" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.665481 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-ckfdv" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.678958 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/root-account-create-update-9fh77" podStartSLOduration=9.678934091 podStartE2EDuration="9.678934091s" podCreationTimestamp="2026-02-02 11:11:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:11:58.67586385 +0000 UTC m=+1113.012964898" watchObservedRunningTime="2026-02-02 11:11:58.678934091 +0000 UTC m=+1113.016035129" Feb 02 11:11:58 crc kubenswrapper[4838]: I0202 11:11:58.781076 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-d3b7-account-create-update-lkzdb" Feb 02 11:11:59 crc kubenswrapper[4838]: I0202 11:11:59.086938 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8fe7-account-create-update-zd7vb"] Feb 02 11:11:59 crc kubenswrapper[4838]: I0202 11:11:59.207895 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-4bd92"] Feb 02 11:11:59 crc kubenswrapper[4838]: I0202 11:11:59.319175 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-9eef-account-create-update-ldszm"] Feb 02 11:11:59 crc kubenswrapper[4838]: I0202 11:11:59.349268 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-cckg8"] Feb 02 11:11:59 crc kubenswrapper[4838]: W0202 11:11:59.382270 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc8e20559_4e85_40ad_a733_41497a1772d1.slice/crio-cc14f26a89a12073ba5510d67a5e2ac56bba3fe5e68154a6369fee25a071587d WatchSource:0}: Error finding container cc14f26a89a12073ba5510d67a5e2ac56bba3fe5e68154a6369fee25a071587d: Status 404 returned error can't find the container with id cc14f26a89a12073ba5510d67a5e2ac56bba3fe5e68154a6369fee25a071587d Feb 02 11:11:59 crc kubenswrapper[4838]: I0202 11:11:59.584922 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-d3b7-account-create-update-lkzdb"] Feb 02 11:11:59 crc kubenswrapper[4838]: I0202 11:11:59.610032 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-ckfdv"] Feb 02 11:11:59 crc kubenswrapper[4838]: I0202 11:11:59.668189 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8fe7-account-create-update-zd7vb" event={"ID":"d17b5d07-41bc-4ab5-b14d-c36ed19c5098","Type":"ContainerStarted","Data":"5b8e0b6cf4387d955454aeb81e3ca0b91be8bad0f03180e758f196c17554e112"} Feb 02 11:11:59 crc kubenswrapper[4838]: I0202 11:11:59.672444 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-ckfdv" event={"ID":"69673190-e2b6-4bdc-b00c-0cef9815317b","Type":"ContainerStarted","Data":"7c909f6e961332f77e24411620fceb30e3869c22c36959bf4d4a342ef4fde587"} Feb 02 11:11:59 crc kubenswrapper[4838]: I0202 11:11:59.679921 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-9eef-account-create-update-ldszm" event={"ID":"c8e20559-4e85-40ad-a733-41497a1772d1","Type":"ContainerStarted","Data":"cc14f26a89a12073ba5510d67a5e2ac56bba3fe5e68154a6369fee25a071587d"} Feb 02 11:11:59 crc kubenswrapper[4838]: I0202 11:11:59.689474 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-snvfg" event={"ID":"54de1caa-888f-433a-be5e-87b93932abc2","Type":"ContainerStarted","Data":"c467002944aa28ab1606b991f49e609351c7d6cd2c5e92143ff8d7880235d978"} Feb 02 11:11:59 crc kubenswrapper[4838]: I0202 11:11:59.693740 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-6wr5s" event={"ID":"e57ac670-eea3-4857-8990-872ce1dba0e4","Type":"ContainerStarted","Data":"16dc7743af77b3d501c26563d4ecfb9bc1955ce5c7b64bd905ac9ef9d33242d6"} Feb 02 11:11:59 crc kubenswrapper[4838]: I0202 11:11:59.693783 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-6wr5s" event={"ID":"e57ac670-eea3-4857-8990-872ce1dba0e4","Type":"ContainerStarted","Data":"2890c593afa8ec4b334d7fceedcb23de30e6efb0353e7dd44f54bba8b9f81542"} Feb 02 11:11:59 crc 
kubenswrapper[4838]: I0202 11:11:59.695775 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d3b7-account-create-update-lkzdb" event={"ID":"f4277c56-4293-4fd6-90e0-ddca7529ef0e","Type":"ContainerStarted","Data":"437f3d91591823d70f98dca0095bba5a57b82031026414dddb8cf688bc7dc1e5"} Feb 02 11:11:59 crc kubenswrapper[4838]: I0202 11:11:59.696487 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-4bd92" event={"ID":"6fdc9ecb-a604-4618-89a6-fd5a2237f10c","Type":"ContainerStarted","Data":"618e489e4cf03ab75aa4d33a82438979aace1767efa58d402bcb3a3ea0d04fd7"} Feb 02 11:11:59 crc kubenswrapper[4838]: I0202 11:11:59.697983 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-cckg8" event={"ID":"cea963bb-084e-4ba7-98b6-342e509be13f","Type":"ContainerStarted","Data":"2214f89c1b3ab127d3d500bc28f0bde87a1608a69c10c6c3a9dcfa1bd0446e68"} Feb 02 11:11:59 crc kubenswrapper[4838]: I0202 11:11:59.712566 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-snvfg" podStartSLOduration=5.352392194 podStartE2EDuration="18.712546405s" podCreationTimestamp="2026-02-02 11:11:41 +0000 UTC" firstStartedPulling="2026-02-02 11:11:42.591107426 +0000 UTC m=+1096.928208454" lastFinishedPulling="2026-02-02 11:11:55.951261647 +0000 UTC m=+1110.288362665" observedRunningTime="2026-02-02 11:11:59.709834804 +0000 UTC m=+1114.046935852" watchObservedRunningTime="2026-02-02 11:11:59.712546405 +0000 UTC m=+1114.049647453" Feb 02 11:11:59 crc kubenswrapper[4838]: I0202 11:11:59.734770 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-6wr5s" podStartSLOduration=2.734751351 podStartE2EDuration="2.734751351s" podCreationTimestamp="2026-02-02 11:11:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:11:59.728583238 +0000 UTC m=+1114.065684266" watchObservedRunningTime="2026-02-02 11:11:59.734751351 +0000 UTC m=+1114.071852379" Feb 02 11:12:00 crc kubenswrapper[4838]: I0202 11:12:00.712768 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8fe7-account-create-update-zd7vb" event={"ID":"d17b5d07-41bc-4ab5-b14d-c36ed19c5098","Type":"ContainerStarted","Data":"20015e6144fcc4eb75dfc9ff448234448937717bffbe119fe6d0fc467448c1eb"} Feb 02 11:12:01 crc kubenswrapper[4838]: I0202 11:12:01.721089 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-ckfdv" event={"ID":"69673190-e2b6-4bdc-b00c-0cef9815317b","Type":"ContainerStarted","Data":"769549457e214173337a68edd07eaed24af9846d43d4dab4959aadc5a25eb0b4"} Feb 02 11:12:01 crc kubenswrapper[4838]: I0202 11:12:01.723183 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-9eef-account-create-update-ldszm" event={"ID":"c8e20559-4e85-40ad-a733-41497a1772d1","Type":"ContainerStarted","Data":"e869a9edd2974a471ccbde9676d4bca1acd45020dc23e073eab3aa52708946c6"} Feb 02 11:12:01 crc kubenswrapper[4838]: I0202 11:12:01.724443 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d3b7-account-create-update-lkzdb" event={"ID":"f4277c56-4293-4fd6-90e0-ddca7529ef0e","Type":"ContainerStarted","Data":"6f13a866bdd5fed5042948bb20c125fd7fd68dbd4011c30311f9e3998c7d4a8a"} Feb 02 11:12:01 crc kubenswrapper[4838]: I0202 11:12:01.725586 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/barbican-db-create-4bd92" event={"ID":"6fdc9ecb-a604-4618-89a6-fd5a2237f10c","Type":"ContainerStarted","Data":"b86bfb431043af4342c1698232e0b1d88f1ad477003300c1587d851498159b51"} Feb 02 11:12:01 crc kubenswrapper[4838]: I0202 11:12:01.741580 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-8fe7-account-create-update-zd7vb" podStartSLOduration=3.741561436 podStartE2EDuration="3.741561436s" podCreationTimestamp="2026-02-02 11:11:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:12:01.737033067 +0000 UTC m=+1116.074134095" watchObservedRunningTime="2026-02-02 11:12:01.741561436 +0000 UTC m=+1116.078662464" Feb 02 11:12:02 crc kubenswrapper[4838]: I0202 11:12:02.755966 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-d3b7-account-create-update-lkzdb" podStartSLOduration=4.755947124 podStartE2EDuration="4.755947124s" podCreationTimestamp="2026-02-02 11:11:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:12:02.752062571 +0000 UTC m=+1117.089163609" watchObservedRunningTime="2026-02-02 11:12:02.755947124 +0000 UTC m=+1117.093048152" Feb 02 11:12:02 crc kubenswrapper[4838]: I0202 11:12:02.777269 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-4bd92" podStartSLOduration=5.777251125 podStartE2EDuration="5.777251125s" podCreationTimestamp="2026-02-02 11:11:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:12:02.774048411 +0000 UTC m=+1117.111149469" watchObservedRunningTime="2026-02-02 11:12:02.777251125 +0000 UTC m=+1117.114352173" Feb 02 11:12:02 crc kubenswrapper[4838]: I0202 11:12:02.797029 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-ckfdv" podStartSLOduration=4.796855812 podStartE2EDuration="4.796855812s" podCreationTimestamp="2026-02-02 11:11:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:12:02.791272435 +0000 UTC m=+1117.128373503" watchObservedRunningTime="2026-02-02 11:12:02.796855812 +0000 UTC m=+1117.133956850" Feb 02 11:12:06 crc kubenswrapper[4838]: I0202 11:12:06.534281 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-9eef-account-create-update-ldszm" podStartSLOduration=9.534259231 podStartE2EDuration="9.534259231s" podCreationTimestamp="2026-02-02 11:11:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:12:02.81801417 +0000 UTC m=+1117.155115228" watchObservedRunningTime="2026-02-02 11:12:06.534259231 +0000 UTC m=+1120.871360259" Feb 02 11:12:18 crc kubenswrapper[4838]: I0202 11:12:18.902266 4838 generic.go:334] "Generic (PLEG): container finished" podID="145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2" containerID="0fbd794241f035aa14cb346f7df5823e22e87535dbf87ceb79e0b384590e4ce0" exitCode=0 Feb 02 11:12:18 crc kubenswrapper[4838]: I0202 11:12:18.902320 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-9fh77" 
event={"ID":"145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2","Type":"ContainerDied","Data":"0fbd794241f035aa14cb346f7df5823e22e87535dbf87ceb79e0b384590e4ce0"} Feb 02 11:12:19 crc kubenswrapper[4838]: E0202 11:12:19.407794 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-swift-account:current-podified" Feb 02 11:12:19 crc kubenswrapper[4838]: E0202 11:12:19.408000 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:account-server,Image:quay.io/podified-antelope-centos9/openstack-swift-account:current-podified,Command:[/usr/bin/swift-account-server /etc/swift/account-server.conf.d -v],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:account,HostPort:0,ContainerPort:6202,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n675h5d7h5dh5c4h74h66fh54ch686h5bh559hcfh674h5bdh5b9hd6h54ch59fh65bh5fbh567hf7h66dhd9h54fh664h697h65dh57bh545h54dh56hc4q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:swift,ReadOnly:false,MountPath:/srv/node/pv,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:etc-swift,ReadOnly:false,MountPath:/etc/swift,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cache,ReadOnly:false,MountPath:/var/cache/swift,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:lock,ReadOnly:false,MountPath:/var/lock,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4bgff,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42445,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-storage-0_openstack(12e53950-9847-46b2-a51a-1fac5b690098): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 11:12:19 crc kubenswrapper[4838]: I0202 11:12:19.911495 4838 generic.go:334] "Generic (PLEG): container finished" podID="e57ac670-eea3-4857-8990-872ce1dba0e4" containerID="16dc7743af77b3d501c26563d4ecfb9bc1955ce5c7b64bd905ac9ef9d33242d6" exitCode=0 Feb 02 11:12:19 crc kubenswrapper[4838]: I0202 11:12:19.911609 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-6wr5s" event={"ID":"e57ac670-eea3-4857-8990-872ce1dba0e4","Type":"ContainerDied","Data":"16dc7743af77b3d501c26563d4ecfb9bc1955ce5c7b64bd905ac9ef9d33242d6"} Feb 02 11:12:20 crc kubenswrapper[4838]: I0202 11:12:20.251289 4838 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/root-account-create-update-9fh77" Feb 02 11:12:20 crc kubenswrapper[4838]: I0202 11:12:20.296178 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ft6pd\" (UniqueName: \"kubernetes.io/projected/145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2-kube-api-access-ft6pd\") pod \"145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2\" (UID: \"145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2\") " Feb 02 11:12:20 crc kubenswrapper[4838]: I0202 11:12:20.296438 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2-operator-scripts\") pod \"145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2\" (UID: \"145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2\") " Feb 02 11:12:20 crc kubenswrapper[4838]: I0202 11:12:20.297092 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2" (UID: "145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:12:20 crc kubenswrapper[4838]: I0202 11:12:20.304484 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2-kube-api-access-ft6pd" (OuterVolumeSpecName: "kube-api-access-ft6pd") pod "145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2" (UID: "145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2"). InnerVolumeSpecName "kube-api-access-ft6pd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:12:20 crc kubenswrapper[4838]: I0202 11:12:20.398482 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ft6pd\" (UniqueName: \"kubernetes.io/projected/145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2-kube-api-access-ft6pd\") on node \"crc\" DevicePath \"\"" Feb 02 11:12:20 crc kubenswrapper[4838]: I0202 11:12:20.398519 4838 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:12:20 crc kubenswrapper[4838]: I0202 11:12:20.929343 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-9fh77" event={"ID":"145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2","Type":"ContainerDied","Data":"c1f47d75afc51028bdba7f937241b48410ab00ee6c706b175cb10e667d48d281"} Feb 02 11:12:20 crc kubenswrapper[4838]: I0202 11:12:20.929425 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c1f47d75afc51028bdba7f937241b48410ab00ee6c706b175cb10e667d48d281" Feb 02 11:12:20 crc kubenswrapper[4838]: I0202 11:12:20.929566 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-9fh77" Feb 02 11:12:20 crc kubenswrapper[4838]: I0202 11:12:20.933189 4838 generic.go:334] "Generic (PLEG): container finished" podID="6fdc9ecb-a604-4618-89a6-fd5a2237f10c" containerID="b86bfb431043af4342c1698232e0b1d88f1ad477003300c1587d851498159b51" exitCode=0 Feb 02 11:12:20 crc kubenswrapper[4838]: I0202 11:12:20.934144 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-4bd92" event={"ID":"6fdc9ecb-a604-4618-89a6-fd5a2237f10c","Type":"ContainerDied","Data":"b86bfb431043af4342c1698232e0b1d88f1ad477003300c1587d851498159b51"} Feb 02 11:12:20 crc kubenswrapper[4838]: I0202 11:12:20.938232 4838 generic.go:334] "Generic (PLEG): container finished" podID="69673190-e2b6-4bdc-b00c-0cef9815317b" containerID="769549457e214173337a68edd07eaed24af9846d43d4dab4959aadc5a25eb0b4" exitCode=0 Feb 02 11:12:20 crc kubenswrapper[4838]: I0202 11:12:20.938549 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-ckfdv" event={"ID":"69673190-e2b6-4bdc-b00c-0cef9815317b","Type":"ContainerDied","Data":"769549457e214173337a68edd07eaed24af9846d43d4dab4959aadc5a25eb0b4"} Feb 02 11:12:22 crc kubenswrapper[4838]: I0202 11:12:22.954732 4838 generic.go:334] "Generic (PLEG): container finished" podID="c8e20559-4e85-40ad-a733-41497a1772d1" containerID="e869a9edd2974a471ccbde9676d4bca1acd45020dc23e073eab3aa52708946c6" exitCode=0 Feb 02 11:12:22 crc kubenswrapper[4838]: I0202 11:12:22.954832 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-9eef-account-create-update-ldszm" event={"ID":"c8e20559-4e85-40ad-a733-41497a1772d1","Type":"ContainerDied","Data":"e869a9edd2974a471ccbde9676d4bca1acd45020dc23e073eab3aa52708946c6"} Feb 02 11:12:22 crc kubenswrapper[4838]: I0202 11:12:22.958373 4838 generic.go:334] "Generic (PLEG): container finished" podID="d17b5d07-41bc-4ab5-b14d-c36ed19c5098" containerID="20015e6144fcc4eb75dfc9ff448234448937717bffbe119fe6d0fc467448c1eb" exitCode=0 Feb 02 11:12:22 crc kubenswrapper[4838]: I0202 11:12:22.958398 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8fe7-account-create-update-zd7vb" event={"ID":"d17b5d07-41bc-4ab5-b14d-c36ed19c5098","Type":"ContainerDied","Data":"20015e6144fcc4eb75dfc9ff448234448937717bffbe119fe6d0fc467448c1eb"} Feb 02 11:12:23 crc kubenswrapper[4838]: I0202 11:12:23.658929 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-ckfdv" Feb 02 11:12:23 crc kubenswrapper[4838]: I0202 11:12:23.663520 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-6wr5s" Feb 02 11:12:23 crc kubenswrapper[4838]: I0202 11:12:23.768518 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69673190-e2b6-4bdc-b00c-0cef9815317b-operator-scripts\") pod \"69673190-e2b6-4bdc-b00c-0cef9815317b\" (UID: \"69673190-e2b6-4bdc-b00c-0cef9815317b\") " Feb 02 11:12:23 crc kubenswrapper[4838]: I0202 11:12:23.768627 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kpd8r\" (UniqueName: \"kubernetes.io/projected/e57ac670-eea3-4857-8990-872ce1dba0e4-kube-api-access-kpd8r\") pod \"e57ac670-eea3-4857-8990-872ce1dba0e4\" (UID: \"e57ac670-eea3-4857-8990-872ce1dba0e4\") " Feb 02 11:12:23 crc kubenswrapper[4838]: I0202 11:12:23.769377 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69673190-e2b6-4bdc-b00c-0cef9815317b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "69673190-e2b6-4bdc-b00c-0cef9815317b" (UID: "69673190-e2b6-4bdc-b00c-0cef9815317b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:12:23 crc kubenswrapper[4838]: I0202 11:12:23.769773 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wz8wn\" (UniqueName: \"kubernetes.io/projected/69673190-e2b6-4bdc-b00c-0cef9815317b-kube-api-access-wz8wn\") pod \"69673190-e2b6-4bdc-b00c-0cef9815317b\" (UID: \"69673190-e2b6-4bdc-b00c-0cef9815317b\") " Feb 02 11:12:23 crc kubenswrapper[4838]: I0202 11:12:23.769816 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e57ac670-eea3-4857-8990-872ce1dba0e4-operator-scripts\") pod \"e57ac670-eea3-4857-8990-872ce1dba0e4\" (UID: \"e57ac670-eea3-4857-8990-872ce1dba0e4\") " Feb 02 11:12:23 crc kubenswrapper[4838]: I0202 11:12:23.770126 4838 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69673190-e2b6-4bdc-b00c-0cef9815317b-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:12:23 crc kubenswrapper[4838]: I0202 11:12:23.770245 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e57ac670-eea3-4857-8990-872ce1dba0e4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e57ac670-eea3-4857-8990-872ce1dba0e4" (UID: "e57ac670-eea3-4857-8990-872ce1dba0e4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:12:23 crc kubenswrapper[4838]: I0202 11:12:23.775651 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e57ac670-eea3-4857-8990-872ce1dba0e4-kube-api-access-kpd8r" (OuterVolumeSpecName: "kube-api-access-kpd8r") pod "e57ac670-eea3-4857-8990-872ce1dba0e4" (UID: "e57ac670-eea3-4857-8990-872ce1dba0e4"). InnerVolumeSpecName "kube-api-access-kpd8r". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:12:23 crc kubenswrapper[4838]: I0202 11:12:23.776123 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69673190-e2b6-4bdc-b00c-0cef9815317b-kube-api-access-wz8wn" (OuterVolumeSpecName: "kube-api-access-wz8wn") pod "69673190-e2b6-4bdc-b00c-0cef9815317b" (UID: "69673190-e2b6-4bdc-b00c-0cef9815317b"). InnerVolumeSpecName "kube-api-access-wz8wn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:12:23 crc kubenswrapper[4838]: I0202 11:12:23.872196 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kpd8r\" (UniqueName: \"kubernetes.io/projected/e57ac670-eea3-4857-8990-872ce1dba0e4-kube-api-access-kpd8r\") on node \"crc\" DevicePath \"\"" Feb 02 11:12:23 crc kubenswrapper[4838]: I0202 11:12:23.872246 4838 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e57ac670-eea3-4857-8990-872ce1dba0e4-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:12:23 crc kubenswrapper[4838]: I0202 11:12:23.872258 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wz8wn\" (UniqueName: \"kubernetes.io/projected/69673190-e2b6-4bdc-b00c-0cef9815317b-kube-api-access-wz8wn\") on node \"crc\" DevicePath \"\"" Feb 02 11:12:23 crc kubenswrapper[4838]: I0202 11:12:23.970434 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-ckfdv" event={"ID":"69673190-e2b6-4bdc-b00c-0cef9815317b","Type":"ContainerDied","Data":"7c909f6e961332f77e24411620fceb30e3869c22c36959bf4d4a342ef4fde587"} Feb 02 11:12:23 crc kubenswrapper[4838]: I0202 11:12:23.970492 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c909f6e961332f77e24411620fceb30e3869c22c36959bf4d4a342ef4fde587" Feb 02 11:12:23 crc kubenswrapper[4838]: I0202 11:12:23.970509 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-ckfdv" Feb 02 11:12:23 crc kubenswrapper[4838]: I0202 11:12:23.984181 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-6wr5s" event={"ID":"e57ac670-eea3-4857-8990-872ce1dba0e4","Type":"ContainerDied","Data":"2890c593afa8ec4b334d7fceedcb23de30e6efb0353e7dd44f54bba8b9f81542"} Feb 02 11:12:23 crc kubenswrapper[4838]: I0202 11:12:23.984218 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-6wr5s" Feb 02 11:12:23 crc kubenswrapper[4838]: I0202 11:12:23.984232 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2890c593afa8ec4b334d7fceedcb23de30e6efb0353e7dd44f54bba8b9f81542" Feb 02 11:12:23 crc kubenswrapper[4838]: I0202 11:12:23.986390 4838 generic.go:334] "Generic (PLEG): container finished" podID="f4277c56-4293-4fd6-90e0-ddca7529ef0e" containerID="6f13a866bdd5fed5042948bb20c125fd7fd68dbd4011c30311f9e3998c7d4a8a" exitCode=0 Feb 02 11:12:23 crc kubenswrapper[4838]: I0202 11:12:23.986436 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d3b7-account-create-update-lkzdb" event={"ID":"f4277c56-4293-4fd6-90e0-ddca7529ef0e","Type":"ContainerDied","Data":"6f13a866bdd5fed5042948bb20c125fd7fd68dbd4011c30311f9e3998c7d4a8a"} Feb 02 11:12:24 crc kubenswrapper[4838]: I0202 11:12:24.139662 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-4bd92" Feb 02 11:12:24 crc kubenswrapper[4838]: I0202 11:12:24.284671 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6fdc9ecb-a604-4618-89a6-fd5a2237f10c-operator-scripts\") pod \"6fdc9ecb-a604-4618-89a6-fd5a2237f10c\" (UID: \"6fdc9ecb-a604-4618-89a6-fd5a2237f10c\") " Feb 02 11:12:24 crc kubenswrapper[4838]: I0202 11:12:24.284719 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rqp4b\" (UniqueName: \"kubernetes.io/projected/6fdc9ecb-a604-4618-89a6-fd5a2237f10c-kube-api-access-rqp4b\") pod \"6fdc9ecb-a604-4618-89a6-fd5a2237f10c\" (UID: \"6fdc9ecb-a604-4618-89a6-fd5a2237f10c\") " Feb 02 11:12:24 crc kubenswrapper[4838]: I0202 11:12:24.286034 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6fdc9ecb-a604-4618-89a6-fd5a2237f10c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6fdc9ecb-a604-4618-89a6-fd5a2237f10c" (UID: "6fdc9ecb-a604-4618-89a6-fd5a2237f10c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:12:24 crc kubenswrapper[4838]: I0202 11:12:24.288742 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fdc9ecb-a604-4618-89a6-fd5a2237f10c-kube-api-access-rqp4b" (OuterVolumeSpecName: "kube-api-access-rqp4b") pod "6fdc9ecb-a604-4618-89a6-fd5a2237f10c" (UID: "6fdc9ecb-a604-4618-89a6-fd5a2237f10c"). InnerVolumeSpecName "kube-api-access-rqp4b". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:12:24 crc kubenswrapper[4838]: I0202 11:12:24.346760 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8fe7-account-create-update-zd7vb" Feb 02 11:12:24 crc kubenswrapper[4838]: I0202 11:12:24.354244 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-9eef-account-create-update-ldszm" Feb 02 11:12:24 crc kubenswrapper[4838]: I0202 11:12:24.387174 4838 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6fdc9ecb-a604-4618-89a6-fd5a2237f10c-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:12:24 crc kubenswrapper[4838]: I0202 11:12:24.387202 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rqp4b\" (UniqueName: \"kubernetes.io/projected/6fdc9ecb-a604-4618-89a6-fd5a2237f10c-kube-api-access-rqp4b\") on node \"crc\" DevicePath \"\"" Feb 02 11:12:24 crc kubenswrapper[4838]: I0202 11:12:24.487837 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c8e20559-4e85-40ad-a733-41497a1772d1-operator-scripts\") pod \"c8e20559-4e85-40ad-a733-41497a1772d1\" (UID: \"c8e20559-4e85-40ad-a733-41497a1772d1\") " Feb 02 11:12:24 crc kubenswrapper[4838]: I0202 11:12:24.488185 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9sc2h\" (UniqueName: \"kubernetes.io/projected/c8e20559-4e85-40ad-a733-41497a1772d1-kube-api-access-9sc2h\") pod \"c8e20559-4e85-40ad-a733-41497a1772d1\" (UID: \"c8e20559-4e85-40ad-a733-41497a1772d1\") " Feb 02 11:12:24 crc kubenswrapper[4838]: I0202 11:12:24.488336 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n8vkf\" (UniqueName: \"kubernetes.io/projected/d17b5d07-41bc-4ab5-b14d-c36ed19c5098-kube-api-access-n8vkf\") pod \"d17b5d07-41bc-4ab5-b14d-c36ed19c5098\" (UID: \"d17b5d07-41bc-4ab5-b14d-c36ed19c5098\") " Feb 02 11:12:24 crc kubenswrapper[4838]: I0202 11:12:24.488564 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d17b5d07-41bc-4ab5-b14d-c36ed19c5098-operator-scripts\") pod \"d17b5d07-41bc-4ab5-b14d-c36ed19c5098\" (UID: \"d17b5d07-41bc-4ab5-b14d-c36ed19c5098\") " Feb 02 11:12:24 crc kubenswrapper[4838]: I0202 11:12:24.488614 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c8e20559-4e85-40ad-a733-41497a1772d1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c8e20559-4e85-40ad-a733-41497a1772d1" (UID: "c8e20559-4e85-40ad-a733-41497a1772d1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:12:24 crc kubenswrapper[4838]: I0202 11:12:24.488982 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d17b5d07-41bc-4ab5-b14d-c36ed19c5098-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d17b5d07-41bc-4ab5-b14d-c36ed19c5098" (UID: "d17b5d07-41bc-4ab5-b14d-c36ed19c5098"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:12:24 crc kubenswrapper[4838]: I0202 11:12:24.489190 4838 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d17b5d07-41bc-4ab5-b14d-c36ed19c5098-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:12:24 crc kubenswrapper[4838]: I0202 11:12:24.489312 4838 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c8e20559-4e85-40ad-a733-41497a1772d1-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:12:24 crc kubenswrapper[4838]: I0202 11:12:24.492432 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8e20559-4e85-40ad-a733-41497a1772d1-kube-api-access-9sc2h" (OuterVolumeSpecName: "kube-api-access-9sc2h") pod "c8e20559-4e85-40ad-a733-41497a1772d1" (UID: "c8e20559-4e85-40ad-a733-41497a1772d1"). InnerVolumeSpecName "kube-api-access-9sc2h". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:12:24 crc kubenswrapper[4838]: I0202 11:12:24.492519 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d17b5d07-41bc-4ab5-b14d-c36ed19c5098-kube-api-access-n8vkf" (OuterVolumeSpecName: "kube-api-access-n8vkf") pod "d17b5d07-41bc-4ab5-b14d-c36ed19c5098" (UID: "d17b5d07-41bc-4ab5-b14d-c36ed19c5098"). InnerVolumeSpecName "kube-api-access-n8vkf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:12:24 crc kubenswrapper[4838]: I0202 11:12:24.590870 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n8vkf\" (UniqueName: \"kubernetes.io/projected/d17b5d07-41bc-4ab5-b14d-c36ed19c5098-kube-api-access-n8vkf\") on node \"crc\" DevicePath \"\"" Feb 02 11:12:24 crc kubenswrapper[4838]: I0202 11:12:24.591260 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9sc2h\" (UniqueName: \"kubernetes.io/projected/c8e20559-4e85-40ad-a733-41497a1772d1-kube-api-access-9sc2h\") on node \"crc\" DevicePath \"\"" Feb 02 11:12:25 crc kubenswrapper[4838]: I0202 11:12:25.019856 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8fe7-account-create-update-zd7vb" event={"ID":"d17b5d07-41bc-4ab5-b14d-c36ed19c5098","Type":"ContainerDied","Data":"5b8e0b6cf4387d955454aeb81e3ca0b91be8bad0f03180e758f196c17554e112"} Feb 02 11:12:25 crc kubenswrapper[4838]: I0202 11:12:25.019901 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b8e0b6cf4387d955454aeb81e3ca0b91be8bad0f03180e758f196c17554e112" Feb 02 11:12:25 crc kubenswrapper[4838]: I0202 11:12:25.019966 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8fe7-account-create-update-zd7vb" Feb 02 11:12:25 crc kubenswrapper[4838]: I0202 11:12:25.026345 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-9eef-account-create-update-ldszm" event={"ID":"c8e20559-4e85-40ad-a733-41497a1772d1","Type":"ContainerDied","Data":"cc14f26a89a12073ba5510d67a5e2ac56bba3fe5e68154a6369fee25a071587d"} Feb 02 11:12:25 crc kubenswrapper[4838]: I0202 11:12:25.026379 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cc14f26a89a12073ba5510d67a5e2ac56bba3fe5e68154a6369fee25a071587d" Feb 02 11:12:25 crc kubenswrapper[4838]: I0202 11:12:25.026442 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-9eef-account-create-update-ldszm" Feb 02 11:12:25 crc kubenswrapper[4838]: I0202 11:12:25.028075 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-4bd92" event={"ID":"6fdc9ecb-a604-4618-89a6-fd5a2237f10c","Type":"ContainerDied","Data":"618e489e4cf03ab75aa4d33a82438979aace1767efa58d402bcb3a3ea0d04fd7"} Feb 02 11:12:25 crc kubenswrapper[4838]: I0202 11:12:25.028097 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="618e489e4cf03ab75aa4d33a82438979aace1767efa58d402bcb3a3ea0d04fd7" Feb 02 11:12:25 crc kubenswrapper[4838]: I0202 11:12:25.028130 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-4bd92" Feb 02 11:12:25 crc kubenswrapper[4838]: I0202 11:12:25.034916 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-cckg8" event={"ID":"cea963bb-084e-4ba7-98b6-342e509be13f","Type":"ContainerStarted","Data":"286d16d8368715d8e2a87addffe858bca8f702c15fe53b22bcaab13965c1e2ae"} Feb 02 11:12:25 crc kubenswrapper[4838]: I0202 11:12:25.041585 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"12e53950-9847-46b2-a51a-1fac5b690098","Type":"ContainerStarted","Data":"466fc6ad8be524da2b4a1060f0bfda30b8e657a138346a833d38fed537c29658"} Feb 02 11:12:25 crc kubenswrapper[4838]: I0202 11:12:25.041649 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"12e53950-9847-46b2-a51a-1fac5b690098","Type":"ContainerStarted","Data":"6dafd69815b93ccbe7928de25c6c97f474f4d3a17cb04f2403c5a189c6055b61"} Feb 02 11:12:25 crc kubenswrapper[4838]: I0202 11:12:25.041663 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"12e53950-9847-46b2-a51a-1fac5b690098","Type":"ContainerStarted","Data":"570714ece1713fecd3b8dc621abf60a10396f4b9cb6b19d54648cec18cb38ab2"} Feb 02 11:12:25 crc kubenswrapper[4838]: I0202 11:12:25.062783 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-cckg8" podStartSLOduration=2.278007536 podStartE2EDuration="27.062765892s" podCreationTimestamp="2026-02-02 11:11:58 +0000 UTC" firstStartedPulling="2026-02-02 11:11:59.377825459 +0000 UTC m=+1113.714926487" lastFinishedPulling="2026-02-02 11:12:24.162583815 +0000 UTC m=+1138.499684843" observedRunningTime="2026-02-02 11:12:25.057532934 +0000 UTC m=+1139.394633972" watchObservedRunningTime="2026-02-02 11:12:25.062765892 +0000 UTC m=+1139.399866920" Feb 02 11:12:25 crc kubenswrapper[4838]: I0202 11:12:25.298074 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-d3b7-account-create-update-lkzdb" Feb 02 11:12:25 crc kubenswrapper[4838]: I0202 11:12:25.417502 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4277c56-4293-4fd6-90e0-ddca7529ef0e-operator-scripts\") pod \"f4277c56-4293-4fd6-90e0-ddca7529ef0e\" (UID: \"f4277c56-4293-4fd6-90e0-ddca7529ef0e\") " Feb 02 11:12:25 crc kubenswrapper[4838]: I0202 11:12:25.417842 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9t96m\" (UniqueName: \"kubernetes.io/projected/f4277c56-4293-4fd6-90e0-ddca7529ef0e-kube-api-access-9t96m\") pod \"f4277c56-4293-4fd6-90e0-ddca7529ef0e\" (UID: \"f4277c56-4293-4fd6-90e0-ddca7529ef0e\") " Feb 02 11:12:25 crc kubenswrapper[4838]: I0202 11:12:25.418098 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4277c56-4293-4fd6-90e0-ddca7529ef0e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f4277c56-4293-4fd6-90e0-ddca7529ef0e" (UID: "f4277c56-4293-4fd6-90e0-ddca7529ef0e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:12:25 crc kubenswrapper[4838]: I0202 11:12:25.418787 4838 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4277c56-4293-4fd6-90e0-ddca7529ef0e-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:12:25 crc kubenswrapper[4838]: I0202 11:12:25.421374 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4277c56-4293-4fd6-90e0-ddca7529ef0e-kube-api-access-9t96m" (OuterVolumeSpecName: "kube-api-access-9t96m") pod "f4277c56-4293-4fd6-90e0-ddca7529ef0e" (UID: "f4277c56-4293-4fd6-90e0-ddca7529ef0e"). InnerVolumeSpecName "kube-api-access-9t96m". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:12:25 crc kubenswrapper[4838]: I0202 11:12:25.521384 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9t96m\" (UniqueName: \"kubernetes.io/projected/f4277c56-4293-4fd6-90e0-ddca7529ef0e-kube-api-access-9t96m\") on node \"crc\" DevicePath \"\"" Feb 02 11:12:26 crc kubenswrapper[4838]: I0202 11:12:26.052726 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d3b7-account-create-update-lkzdb" event={"ID":"f4277c56-4293-4fd6-90e0-ddca7529ef0e","Type":"ContainerDied","Data":"437f3d91591823d70f98dca0095bba5a57b82031026414dddb8cf688bc7dc1e5"} Feb 02 11:12:26 crc kubenswrapper[4838]: I0202 11:12:26.052996 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="437f3d91591823d70f98dca0095bba5a57b82031026414dddb8cf688bc7dc1e5" Feb 02 11:12:26 crc kubenswrapper[4838]: I0202 11:12:26.052742 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-d3b7-account-create-update-lkzdb" Feb 02 11:12:26 crc kubenswrapper[4838]: I0202 11:12:26.055019 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"12e53950-9847-46b2-a51a-1fac5b690098","Type":"ContainerStarted","Data":"10881555b313401bbc59bfb31594af5da54e5129e1dc38794a4ca45ea179309a"} Feb 02 11:12:31 crc kubenswrapper[4838]: I0202 11:12:31.099225 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"12e53950-9847-46b2-a51a-1fac5b690098","Type":"ContainerStarted","Data":"fed5477077143a8bf5587462d5b70a7cfa62e037e9fb22ec3f7333c2260cca76"} Feb 02 11:12:31 crc kubenswrapper[4838]: I0202 11:12:31.099795 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"12e53950-9847-46b2-a51a-1fac5b690098","Type":"ContainerStarted","Data":"0d62b5e924b99483e02e754459d87c0c9b619c62f121f91fb615ef0a427a2820"} Feb 02 11:12:31 crc kubenswrapper[4838]: I0202 11:12:31.099808 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"12e53950-9847-46b2-a51a-1fac5b690098","Type":"ContainerStarted","Data":"eb9c803166d5173179ccc3f06a444fd9bacbd7e75ebbbfe122a7dca219f8e174"} Feb 02 11:12:32 crc kubenswrapper[4838]: I0202 11:12:32.111186 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"12e53950-9847-46b2-a51a-1fac5b690098","Type":"ContainerStarted","Data":"4241949d3c1bafca4d067429fab9ec54fd4c9057bb21c876383ff449c9a9e415"} Feb 02 11:12:32 crc kubenswrapper[4838]: I0202 11:12:32.111445 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"12e53950-9847-46b2-a51a-1fac5b690098","Type":"ContainerStarted","Data":"f113a0bf7b001a81b0981d4fef6c65133e33c9ec5d1ac8319e652153a80acc0d"} Feb 02 11:12:32 crc kubenswrapper[4838]: I0202 11:12:32.111454 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"12e53950-9847-46b2-a51a-1fac5b690098","Type":"ContainerStarted","Data":"4f41554b64a65df3397bff3546e8f3a1be42d0cbac453a8ee6fe0789b632a417"} Feb 02 11:12:32 crc kubenswrapper[4838]: E0202 11:12:32.146038 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"account-server\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"account-replicator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-swift-account:current-podified\\\"\", failed to \"StartContainer\" for \"account-auditor\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-swift-account:current-podified\\\"\", failed to \"StartContainer\" for \"account-reaper\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-swift-account:current-podified\\\"\"]" pod="openstack/swift-storage-0" podUID="12e53950-9847-46b2-a51a-1fac5b690098" Feb 02 11:12:33 crc kubenswrapper[4838]: I0202 11:12:33.124987 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"12e53950-9847-46b2-a51a-1fac5b690098","Type":"ContainerStarted","Data":"5f97571dc063156054cb33c32c8e1480494b8e91de61fff5e3b1475c57b66a2a"} Feb 02 11:12:34 crc kubenswrapper[4838]: I0202 11:12:34.164046 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/swift-storage-0" event={"ID":"12e53950-9847-46b2-a51a-1fac5b690098","Type":"ContainerStarted","Data":"e06ce8c1dc963943284365f1e76418d43cc50d9d495ad052dbeb61d93e710371"} Feb 02 11:12:34 crc kubenswrapper[4838]: I0202 11:12:34.164572 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"12e53950-9847-46b2-a51a-1fac5b690098","Type":"ContainerStarted","Data":"6b0e93e14397f97428b631e5fe0954eb39ff638f09d8ea897bd7728a3fe9e12a"} Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.183020 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"12e53950-9847-46b2-a51a-1fac5b690098","Type":"ContainerStarted","Data":"b646d3b497bcc98a1956668d0f34800447e76a2e50a311c91202cb2272f57c2f"} Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.183091 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"12e53950-9847-46b2-a51a-1fac5b690098","Type":"ContainerStarted","Data":"62a46de3d45eaa848cbb578f9afc2c7d4f12bf6218de3c77b44479f0af9b6c14"} Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.233416 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=35.255138407 podStartE2EDuration="1m12.233393213s" podCreationTimestamp="2026-02-02 11:11:23 +0000 UTC" firstStartedPulling="2026-02-02 11:11:56.744670058 +0000 UTC m=+1111.081771086" lastFinishedPulling="2026-02-02 11:12:33.722924864 +0000 UTC m=+1148.060025892" observedRunningTime="2026-02-02 11:12:35.227025575 +0000 UTC m=+1149.564126613" watchObservedRunningTime="2026-02-02 11:12:35.233393213 +0000 UTC m=+1149.570494271" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.523530 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-svlph"] Feb 02 11:12:35 crc kubenswrapper[4838]: E0202 11:12:35.524508 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2" containerName="mariadb-account-create-update" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.524605 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2" containerName="mariadb-account-create-update" Feb 02 11:12:35 crc kubenswrapper[4838]: E0202 11:12:35.524726 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69673190-e2b6-4bdc-b00c-0cef9815317b" containerName="mariadb-database-create" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.524809 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="69673190-e2b6-4bdc-b00c-0cef9815317b" containerName="mariadb-database-create" Feb 02 11:12:35 crc kubenswrapper[4838]: E0202 11:12:35.524916 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fdc9ecb-a604-4618-89a6-fd5a2237f10c" containerName="mariadb-database-create" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.525197 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fdc9ecb-a604-4618-89a6-fd5a2237f10c" containerName="mariadb-database-create" Feb 02 11:12:35 crc kubenswrapper[4838]: E0202 11:12:35.525467 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8e20559-4e85-40ad-a733-41497a1772d1" containerName="mariadb-account-create-update" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.525618 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8e20559-4e85-40ad-a733-41497a1772d1" containerName="mariadb-account-create-update" Feb 02 
11:12:35 crc kubenswrapper[4838]: E0202 11:12:35.525779 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4277c56-4293-4fd6-90e0-ddca7529ef0e" containerName="mariadb-account-create-update" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.525860 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4277c56-4293-4fd6-90e0-ddca7529ef0e" containerName="mariadb-account-create-update" Feb 02 11:12:35 crc kubenswrapper[4838]: E0202 11:12:35.525940 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e57ac670-eea3-4857-8990-872ce1dba0e4" containerName="mariadb-database-create" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.526013 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="e57ac670-eea3-4857-8990-872ce1dba0e4" containerName="mariadb-database-create" Feb 02 11:12:35 crc kubenswrapper[4838]: E0202 11:12:35.526101 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d17b5d07-41bc-4ab5-b14d-c36ed19c5098" containerName="mariadb-account-create-update" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.526174 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="d17b5d07-41bc-4ab5-b14d-c36ed19c5098" containerName="mariadb-account-create-update" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.526461 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="e57ac670-eea3-4857-8990-872ce1dba0e4" containerName="mariadb-database-create" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.526546 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4277c56-4293-4fd6-90e0-ddca7529ef0e" containerName="mariadb-account-create-update" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.526623 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="69673190-e2b6-4bdc-b00c-0cef9815317b" containerName="mariadb-database-create" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.526758 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2" containerName="mariadb-account-create-update" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.526833 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="d17b5d07-41bc-4ab5-b14d-c36ed19c5098" containerName="mariadb-account-create-update" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.526904 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fdc9ecb-a604-4618-89a6-fd5a2237f10c" containerName="mariadb-database-create" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.526983 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8e20559-4e85-40ad-a733-41497a1772d1" containerName="mariadb-account-create-update" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.528019 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-svlph" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.531454 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.532990 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-svlph"] Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.623490 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-svlph\" (UID: \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\") " pod="openstack/dnsmasq-dns-764c5664d7-svlph" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.623550 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-dns-svc\") pod \"dnsmasq-dns-764c5664d7-svlph\" (UID: \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\") " pod="openstack/dnsmasq-dns-764c5664d7-svlph" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.623703 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-svlph\" (UID: \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\") " pod="openstack/dnsmasq-dns-764c5664d7-svlph" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.623744 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wsgx2\" (UniqueName: \"kubernetes.io/projected/04c9c0f3-24b8-4590-902e-bdd94bb01c25-kube-api-access-wsgx2\") pod \"dnsmasq-dns-764c5664d7-svlph\" (UID: \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\") " pod="openstack/dnsmasq-dns-764c5664d7-svlph" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.623760 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-svlph\" (UID: \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\") " pod="openstack/dnsmasq-dns-764c5664d7-svlph" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.623780 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-config\") pod \"dnsmasq-dns-764c5664d7-svlph\" (UID: \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\") " pod="openstack/dnsmasq-dns-764c5664d7-svlph" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.740574 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-svlph\" (UID: \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\") " pod="openstack/dnsmasq-dns-764c5664d7-svlph" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.740743 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wsgx2\" (UniqueName: \"kubernetes.io/projected/04c9c0f3-24b8-4590-902e-bdd94bb01c25-kube-api-access-wsgx2\") pod \"dnsmasq-dns-764c5664d7-svlph\" (UID: 
\"04c9c0f3-24b8-4590-902e-bdd94bb01c25\") " pod="openstack/dnsmasq-dns-764c5664d7-svlph" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.740771 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-svlph\" (UID: \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\") " pod="openstack/dnsmasq-dns-764c5664d7-svlph" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.740804 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-config\") pod \"dnsmasq-dns-764c5664d7-svlph\" (UID: \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\") " pod="openstack/dnsmasq-dns-764c5664d7-svlph" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.740890 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-svlph\" (UID: \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\") " pod="openstack/dnsmasq-dns-764c5664d7-svlph" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.740917 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-dns-svc\") pod \"dnsmasq-dns-764c5664d7-svlph\" (UID: \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\") " pod="openstack/dnsmasq-dns-764c5664d7-svlph" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.741578 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-svlph\" (UID: \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\") " pod="openstack/dnsmasq-dns-764c5664d7-svlph" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.741836 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-svlph\" (UID: \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\") " pod="openstack/dnsmasq-dns-764c5664d7-svlph" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.741895 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-dns-svc\") pod \"dnsmasq-dns-764c5664d7-svlph\" (UID: \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\") " pod="openstack/dnsmasq-dns-764c5664d7-svlph" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.742097 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-config\") pod \"dnsmasq-dns-764c5664d7-svlph\" (UID: \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\") " pod="openstack/dnsmasq-dns-764c5664d7-svlph" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.742350 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-svlph\" (UID: \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\") " pod="openstack/dnsmasq-dns-764c5664d7-svlph" Feb 02 11:12:35 crc kubenswrapper[4838]: 
I0202 11:12:35.758274 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wsgx2\" (UniqueName: \"kubernetes.io/projected/04c9c0f3-24b8-4590-902e-bdd94bb01c25-kube-api-access-wsgx2\") pod \"dnsmasq-dns-764c5664d7-svlph\" (UID: \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\") " pod="openstack/dnsmasq-dns-764c5664d7-svlph" Feb 02 11:12:35 crc kubenswrapper[4838]: I0202 11:12:35.863737 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-svlph" Feb 02 11:12:36 crc kubenswrapper[4838]: I0202 11:12:36.363514 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-svlph"] Feb 02 11:12:37 crc kubenswrapper[4838]: I0202 11:12:37.205774 4838 generic.go:334] "Generic (PLEG): container finished" podID="04c9c0f3-24b8-4590-902e-bdd94bb01c25" containerID="a69ce1656dd4a0b000efef4a6ead2f328727fffa45e1beb50eebdbe565c8ccfe" exitCode=0 Feb 02 11:12:37 crc kubenswrapper[4838]: I0202 11:12:37.205837 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-svlph" event={"ID":"04c9c0f3-24b8-4590-902e-bdd94bb01c25","Type":"ContainerDied","Data":"a69ce1656dd4a0b000efef4a6ead2f328727fffa45e1beb50eebdbe565c8ccfe"} Feb 02 11:12:37 crc kubenswrapper[4838]: I0202 11:12:37.206089 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-svlph" event={"ID":"04c9c0f3-24b8-4590-902e-bdd94bb01c25","Type":"ContainerStarted","Data":"83f25f019de6aa083b3cd3202d9f7507691d16249fd8597889ce6d71ac2da559"} Feb 02 11:12:38 crc kubenswrapper[4838]: I0202 11:12:38.215681 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-svlph" event={"ID":"04c9c0f3-24b8-4590-902e-bdd94bb01c25","Type":"ContainerStarted","Data":"7ac670e2992f3818d1f1a3337795975b221121e28ff0dd1e1f5ed1b65802cccd"} Feb 02 11:12:38 crc kubenswrapper[4838]: I0202 11:12:38.215978 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-764c5664d7-svlph" Feb 02 11:12:38 crc kubenswrapper[4838]: I0202 11:12:38.234134 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-764c5664d7-svlph" podStartSLOduration=3.234116976 podStartE2EDuration="3.234116976s" podCreationTimestamp="2026-02-02 11:12:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:12:38.23277605 +0000 UTC m=+1152.569877088" watchObservedRunningTime="2026-02-02 11:12:38.234116976 +0000 UTC m=+1152.571218004" Feb 02 11:12:42 crc kubenswrapper[4838]: I0202 11:12:42.256677 4838 generic.go:334] "Generic (PLEG): container finished" podID="cea963bb-084e-4ba7-98b6-342e509be13f" containerID="286d16d8368715d8e2a87addffe858bca8f702c15fe53b22bcaab13965c1e2ae" exitCode=0 Feb 02 11:12:42 crc kubenswrapper[4838]: I0202 11:12:42.256775 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-cckg8" event={"ID":"cea963bb-084e-4ba7-98b6-342e509be13f","Type":"ContainerDied","Data":"286d16d8368715d8e2a87addffe858bca8f702c15fe53b22bcaab13965c1e2ae"} Feb 02 11:12:43 crc kubenswrapper[4838]: I0202 11:12:43.570459 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-cckg8" Feb 02 11:12:43 crc kubenswrapper[4838]: I0202 11:12:43.670370 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qjklf\" (UniqueName: \"kubernetes.io/projected/cea963bb-084e-4ba7-98b6-342e509be13f-kube-api-access-qjklf\") pod \"cea963bb-084e-4ba7-98b6-342e509be13f\" (UID: \"cea963bb-084e-4ba7-98b6-342e509be13f\") " Feb 02 11:12:43 crc kubenswrapper[4838]: I0202 11:12:43.670477 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cea963bb-084e-4ba7-98b6-342e509be13f-config-data\") pod \"cea963bb-084e-4ba7-98b6-342e509be13f\" (UID: \"cea963bb-084e-4ba7-98b6-342e509be13f\") " Feb 02 11:12:43 crc kubenswrapper[4838]: I0202 11:12:43.670684 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cea963bb-084e-4ba7-98b6-342e509be13f-combined-ca-bundle\") pod \"cea963bb-084e-4ba7-98b6-342e509be13f\" (UID: \"cea963bb-084e-4ba7-98b6-342e509be13f\") " Feb 02 11:12:43 crc kubenswrapper[4838]: I0202 11:12:43.677608 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cea963bb-084e-4ba7-98b6-342e509be13f-kube-api-access-qjklf" (OuterVolumeSpecName: "kube-api-access-qjklf") pod "cea963bb-084e-4ba7-98b6-342e509be13f" (UID: "cea963bb-084e-4ba7-98b6-342e509be13f"). InnerVolumeSpecName "kube-api-access-qjklf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:12:43 crc kubenswrapper[4838]: I0202 11:12:43.696863 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cea963bb-084e-4ba7-98b6-342e509be13f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cea963bb-084e-4ba7-98b6-342e509be13f" (UID: "cea963bb-084e-4ba7-98b6-342e509be13f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:12:43 crc kubenswrapper[4838]: I0202 11:12:43.743212 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cea963bb-084e-4ba7-98b6-342e509be13f-config-data" (OuterVolumeSpecName: "config-data") pod "cea963bb-084e-4ba7-98b6-342e509be13f" (UID: "cea963bb-084e-4ba7-98b6-342e509be13f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:12:43 crc kubenswrapper[4838]: I0202 11:12:43.773727 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cea963bb-084e-4ba7-98b6-342e509be13f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:12:43 crc kubenswrapper[4838]: I0202 11:12:43.773761 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qjklf\" (UniqueName: \"kubernetes.io/projected/cea963bb-084e-4ba7-98b6-342e509be13f-kube-api-access-qjklf\") on node \"crc\" DevicePath \"\"" Feb 02 11:12:43 crc kubenswrapper[4838]: I0202 11:12:43.773773 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cea963bb-084e-4ba7-98b6-342e509be13f-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.274849 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-cckg8" event={"ID":"cea963bb-084e-4ba7-98b6-342e509be13f","Type":"ContainerDied","Data":"2214f89c1b3ab127d3d500bc28f0bde87a1608a69c10c6c3a9dcfa1bd0446e68"} Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.275159 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2214f89c1b3ab127d3d500bc28f0bde87a1608a69c10c6c3a9dcfa1bd0446e68" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.274909 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-cckg8" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.526915 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-svlph"] Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.527153 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-764c5664d7-svlph" podUID="04c9c0f3-24b8-4590-902e-bdd94bb01c25" containerName="dnsmasq-dns" containerID="cri-o://7ac670e2992f3818d1f1a3337795975b221121e28ff0dd1e1f5ed1b65802cccd" gracePeriod=10 Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.527741 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-764c5664d7-svlph" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.571191 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-kc8kc"] Feb 02 11:12:44 crc kubenswrapper[4838]: E0202 11:12:44.571667 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cea963bb-084e-4ba7-98b6-342e509be13f" containerName="keystone-db-sync" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.571684 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="cea963bb-084e-4ba7-98b6-342e509be13f" containerName="keystone-db-sync" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.571899 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="cea963bb-084e-4ba7-98b6-342e509be13f" containerName="keystone-db-sync" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.573660 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-kc8kc" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.590391 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-b8d7k"] Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.591881 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-b8d7k" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.598296 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.598357 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-9mbr8" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.598499 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.598728 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.598930 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.663983 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-kc8kc"] Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.690157 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-fernet-keys\") pod \"keystone-bootstrap-b8d7k\" (UID: \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\") " pod="openstack/keystone-bootstrap-b8d7k" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.690414 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-config\") pod \"dnsmasq-dns-5959f8865f-kc8kc\" (UID: \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\") " pod="openstack/dnsmasq-dns-5959f8865f-kc8kc" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.690552 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rj2g2\" (UniqueName: \"kubernetes.io/projected/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-kube-api-access-rj2g2\") pod \"dnsmasq-dns-5959f8865f-kc8kc\" (UID: \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\") " pod="openstack/dnsmasq-dns-5959f8865f-kc8kc" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.690739 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-dns-svc\") pod \"dnsmasq-dns-5959f8865f-kc8kc\" (UID: \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\") " pod="openstack/dnsmasq-dns-5959f8865f-kc8kc" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.690884 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-ovsdbserver-sb\") pod \"dnsmasq-dns-5959f8865f-kc8kc\" (UID: \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\") " pod="openstack/dnsmasq-dns-5959f8865f-kc8kc" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.691014 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-ovsdbserver-nb\") pod \"dnsmasq-dns-5959f8865f-kc8kc\" (UID: \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\") " pod="openstack/dnsmasq-dns-5959f8865f-kc8kc" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.691223 4838 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-credential-keys\") pod \"keystone-bootstrap-b8d7k\" (UID: \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\") " pod="openstack/keystone-bootstrap-b8d7k" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.691323 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-scripts\") pod \"keystone-bootstrap-b8d7k\" (UID: \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\") " pod="openstack/keystone-bootstrap-b8d7k" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.691437 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-combined-ca-bundle\") pod \"keystone-bootstrap-b8d7k\" (UID: \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\") " pod="openstack/keystone-bootstrap-b8d7k" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.691601 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-dns-swift-storage-0\") pod \"dnsmasq-dns-5959f8865f-kc8kc\" (UID: \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\") " pod="openstack/dnsmasq-dns-5959f8865f-kc8kc" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.691720 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-config-data\") pod \"keystone-bootstrap-b8d7k\" (UID: \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\") " pod="openstack/keystone-bootstrap-b8d7k" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.691819 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ws6j\" (UniqueName: \"kubernetes.io/projected/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-kube-api-access-6ws6j\") pod \"keystone-bootstrap-b8d7k\" (UID: \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\") " pod="openstack/keystone-bootstrap-b8d7k" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.702440 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-b8d7k"] Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.739052 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ironic-db-create-lgk8x"] Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.740319 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-db-create-lgk8x" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.793525 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-credential-keys\") pod \"keystone-bootstrap-b8d7k\" (UID: \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\") " pod="openstack/keystone-bootstrap-b8d7k" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.793567 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-scripts\") pod \"keystone-bootstrap-b8d7k\" (UID: \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\") " pod="openstack/keystone-bootstrap-b8d7k" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.793597 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-combined-ca-bundle\") pod \"keystone-bootstrap-b8d7k\" (UID: \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\") " pod="openstack/keystone-bootstrap-b8d7k" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.793658 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-dns-swift-storage-0\") pod \"dnsmasq-dns-5959f8865f-kc8kc\" (UID: \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\") " pod="openstack/dnsmasq-dns-5959f8865f-kc8kc" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.793678 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-config-data\") pod \"keystone-bootstrap-b8d7k\" (UID: \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\") " pod="openstack/keystone-bootstrap-b8d7k" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.793696 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ws6j\" (UniqueName: \"kubernetes.io/projected/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-kube-api-access-6ws6j\") pod \"keystone-bootstrap-b8d7k\" (UID: \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\") " pod="openstack/keystone-bootstrap-b8d7k" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.793726 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-fernet-keys\") pod \"keystone-bootstrap-b8d7k\" (UID: \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\") " pod="openstack/keystone-bootstrap-b8d7k" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.793744 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-config\") pod \"dnsmasq-dns-5959f8865f-kc8kc\" (UID: \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\") " pod="openstack/dnsmasq-dns-5959f8865f-kc8kc" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.793764 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rj2g2\" (UniqueName: \"kubernetes.io/projected/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-kube-api-access-rj2g2\") pod \"dnsmasq-dns-5959f8865f-kc8kc\" (UID: \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\") " pod="openstack/dnsmasq-dns-5959f8865f-kc8kc" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 
11:12:44.793785 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-dns-svc\") pod \"dnsmasq-dns-5959f8865f-kc8kc\" (UID: \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\") " pod="openstack/dnsmasq-dns-5959f8865f-kc8kc" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.793818 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-ovsdbserver-sb\") pod \"dnsmasq-dns-5959f8865f-kc8kc\" (UID: \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\") " pod="openstack/dnsmasq-dns-5959f8865f-kc8kc" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.793836 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-ovsdbserver-nb\") pod \"dnsmasq-dns-5959f8865f-kc8kc\" (UID: \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\") " pod="openstack/dnsmasq-dns-5959f8865f-kc8kc" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.794709 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-ovsdbserver-nb\") pod \"dnsmasq-dns-5959f8865f-kc8kc\" (UID: \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\") " pod="openstack/dnsmasq-dns-5959f8865f-kc8kc" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.794802 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-5jll9"] Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.795796 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-5jll9" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.798691 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.798877 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.799637 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-dns-swift-storage-0\") pod \"dnsmasq-dns-5959f8865f-kc8kc\" (UID: \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\") " pod="openstack/dnsmasq-dns-5959f8865f-kc8kc" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.800093 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-dns-svc\") pod \"dnsmasq-dns-5959f8865f-kc8kc\" (UID: \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\") " pod="openstack/dnsmasq-dns-5959f8865f-kc8kc" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.805159 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-ovsdbserver-sb\") pod \"dnsmasq-dns-5959f8865f-kc8kc\" (UID: \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\") " pod="openstack/dnsmasq-dns-5959f8865f-kc8kc" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.806049 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-bsbkf" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.808929 4838 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-config\") pod \"dnsmasq-dns-5959f8865f-kc8kc\" (UID: \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\") " pod="openstack/dnsmasq-dns-5959f8865f-kc8kc" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.809272 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-db-create-lgk8x"] Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.817055 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-credential-keys\") pod \"keystone-bootstrap-b8d7k\" (UID: \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\") " pod="openstack/keystone-bootstrap-b8d7k" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.817061 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-fernet-keys\") pod \"keystone-bootstrap-b8d7k\" (UID: \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\") " pod="openstack/keystone-bootstrap-b8d7k" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.817129 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-5jll9"] Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.832408 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-scripts\") pod \"keystone-bootstrap-b8d7k\" (UID: \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\") " pod="openstack/keystone-bootstrap-b8d7k" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.833536 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-combined-ca-bundle\") pod \"keystone-bootstrap-b8d7k\" (UID: \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\") " pod="openstack/keystone-bootstrap-b8d7k" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.874275 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rj2g2\" (UniqueName: \"kubernetes.io/projected/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-kube-api-access-rj2g2\") pod \"dnsmasq-dns-5959f8865f-kc8kc\" (UID: \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\") " pod="openstack/dnsmasq-dns-5959f8865f-kc8kc" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.887919 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ws6j\" (UniqueName: \"kubernetes.io/projected/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-kube-api-access-6ws6j\") pod \"keystone-bootstrap-b8d7k\" (UID: \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\") " pod="openstack/keystone-bootstrap-b8d7k" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.909834 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12da676a-3c0b-4e05-996b-6f0b859d99e3-combined-ca-bundle\") pod \"cinder-db-sync-5jll9\" (UID: \"12da676a-3c0b-4e05-996b-6f0b859d99e3\") " pod="openstack/cinder-db-sync-5jll9" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.909924 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/989df2c8-4c47-4998-a79d-967c08df69fb-operator-scripts\") pod \"ironic-db-create-lgk8x\" (UID: 
\"989df2c8-4c47-4998-a79d-967c08df69fb\") " pod="openstack/ironic-db-create-lgk8x" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.909958 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12da676a-3c0b-4e05-996b-6f0b859d99e3-config-data\") pod \"cinder-db-sync-5jll9\" (UID: \"12da676a-3c0b-4e05-996b-6f0b859d99e3\") " pod="openstack/cinder-db-sync-5jll9" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.909986 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xct8m\" (UniqueName: \"kubernetes.io/projected/989df2c8-4c47-4998-a79d-967c08df69fb-kube-api-access-xct8m\") pod \"ironic-db-create-lgk8x\" (UID: \"989df2c8-4c47-4998-a79d-967c08df69fb\") " pod="openstack/ironic-db-create-lgk8x" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.910024 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29dl6\" (UniqueName: \"kubernetes.io/projected/12da676a-3c0b-4e05-996b-6f0b859d99e3-kube-api-access-29dl6\") pod \"cinder-db-sync-5jll9\" (UID: \"12da676a-3c0b-4e05-996b-6f0b859d99e3\") " pod="openstack/cinder-db-sync-5jll9" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.910112 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/12da676a-3c0b-4e05-996b-6f0b859d99e3-db-sync-config-data\") pod \"cinder-db-sync-5jll9\" (UID: \"12da676a-3c0b-4e05-996b-6f0b859d99e3\") " pod="openstack/cinder-db-sync-5jll9" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.910211 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/12da676a-3c0b-4e05-996b-6f0b859d99e3-etc-machine-id\") pod \"cinder-db-sync-5jll9\" (UID: \"12da676a-3c0b-4e05-996b-6f0b859d99e3\") " pod="openstack/cinder-db-sync-5jll9" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.910229 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/12da676a-3c0b-4e05-996b-6f0b859d99e3-scripts\") pod \"cinder-db-sync-5jll9\" (UID: \"12da676a-3c0b-4e05-996b-6f0b859d99e3\") " pod="openstack/cinder-db-sync-5jll9" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.914234 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-kc8kc" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.938392 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-config-data\") pod \"keystone-bootstrap-b8d7k\" (UID: \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\") " pod="openstack/keystone-bootstrap-b8d7k" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.969246 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ironic-8dc3-account-create-update-9m9lm"] Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.970823 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-8dc3-account-create-update-9m9lm" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.976369 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-db-secret" Feb 02 11:12:44 crc kubenswrapper[4838]: I0202 11:12:44.999771 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-8dc3-account-create-update-9m9lm"] Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.009863 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-4dkm9"] Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.011464 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-4dkm9" Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.014936 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-crmvz" Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.015199 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.021248 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.026147 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.026790 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/989df2c8-4c47-4998-a79d-967c08df69fb-operator-scripts\") pod \"ironic-db-create-lgk8x\" (UID: \"989df2c8-4c47-4998-a79d-967c08df69fb\") " pod="openstack/ironic-db-create-lgk8x" Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.026850 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12da676a-3c0b-4e05-996b-6f0b859d99e3-config-data\") pod \"cinder-db-sync-5jll9\" (UID: \"12da676a-3c0b-4e05-996b-6f0b859d99e3\") " pod="openstack/cinder-db-sync-5jll9" Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.026896 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xct8m\" (UniqueName: \"kubernetes.io/projected/989df2c8-4c47-4998-a79d-967c08df69fb-kube-api-access-xct8m\") pod \"ironic-db-create-lgk8x\" (UID: \"989df2c8-4c47-4998-a79d-967c08df69fb\") " pod="openstack/ironic-db-create-lgk8x" Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.026944 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29dl6\" (UniqueName: \"kubernetes.io/projected/12da676a-3c0b-4e05-996b-6f0b859d99e3-kube-api-access-29dl6\") pod \"cinder-db-sync-5jll9\" (UID: \"12da676a-3c0b-4e05-996b-6f0b859d99e3\") " pod="openstack/cinder-db-sync-5jll9" Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.027062 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/12da676a-3c0b-4e05-996b-6f0b859d99e3-db-sync-config-data\") pod \"cinder-db-sync-5jll9\" (UID: \"12da676a-3c0b-4e05-996b-6f0b859d99e3\") " pod="openstack/cinder-db-sync-5jll9" Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.027210 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/12da676a-3c0b-4e05-996b-6f0b859d99e3-etc-machine-id\") pod \"cinder-db-sync-5jll9\" (UID: \"12da676a-3c0b-4e05-996b-6f0b859d99e3\") " pod="openstack/cinder-db-sync-5jll9" Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.027232 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/12da676a-3c0b-4e05-996b-6f0b859d99e3-scripts\") pod \"cinder-db-sync-5jll9\" (UID: \"12da676a-3c0b-4e05-996b-6f0b859d99e3\") " pod="openstack/cinder-db-sync-5jll9" Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.027308 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12da676a-3c0b-4e05-996b-6f0b859d99e3-combined-ca-bundle\") pod \"cinder-db-sync-5jll9\" (UID: \"12da676a-3c0b-4e05-996b-6f0b859d99e3\") " pod="openstack/cinder-db-sync-5jll9" Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.028449 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.028693 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.029002 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/12da676a-3c0b-4e05-996b-6f0b859d99e3-etc-machine-id\") pod \"cinder-db-sync-5jll9\" (UID: \"12da676a-3c0b-4e05-996b-6f0b859d99e3\") " pod="openstack/cinder-db-sync-5jll9" Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.031433 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12da676a-3c0b-4e05-996b-6f0b859d99e3-config-data\") pod \"cinder-db-sync-5jll9\" (UID: \"12da676a-3c0b-4e05-996b-6f0b859d99e3\") " pod="openstack/cinder-db-sync-5jll9" Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.033414 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-b8d7k"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.033933 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/989df2c8-4c47-4998-a79d-967c08df69fb-operator-scripts\") pod \"ironic-db-create-lgk8x\" (UID: \"989df2c8-4c47-4998-a79d-967c08df69fb\") " pod="openstack/ironic-db-create-lgk8x"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.041648 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/12da676a-3c0b-4e05-996b-6f0b859d99e3-scripts\") pod \"cinder-db-sync-5jll9\" (UID: \"12da676a-3c0b-4e05-996b-6f0b859d99e3\") " pod="openstack/cinder-db-sync-5jll9"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.047048 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29dl6\" (UniqueName: \"kubernetes.io/projected/12da676a-3c0b-4e05-996b-6f0b859d99e3-kube-api-access-29dl6\") pod \"cinder-db-sync-5jll9\" (UID: \"12da676a-3c0b-4e05-996b-6f0b859d99e3\") " pod="openstack/cinder-db-sync-5jll9"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.051192 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/12da676a-3c0b-4e05-996b-6f0b859d99e3-db-sync-config-data\") pod \"cinder-db-sync-5jll9\" (UID: \"12da676a-3c0b-4e05-996b-6f0b859d99e3\") " pod="openstack/cinder-db-sync-5jll9"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.060884 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-m9rhm"]
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.062759 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-m9rhm"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.067463 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-fw9th"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.067800 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.068507 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.071406 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12da676a-3c0b-4e05-996b-6f0b859d99e3-combined-ca-bundle\") pod \"cinder-db-sync-5jll9\" (UID: \"12da676a-3c0b-4e05-996b-6f0b859d99e3\") " pod="openstack/cinder-db-sync-5jll9"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.071932 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xct8m\" (UniqueName: \"kubernetes.io/projected/989df2c8-4c47-4998-a79d-967c08df69fb-kube-api-access-xct8m\") pod \"ironic-db-create-lgk8x\" (UID: \"989df2c8-4c47-4998-a79d-967c08df69fb\") " pod="openstack/ironic-db-create-lgk8x"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.079819 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-4dkm9"]
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.086758 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-db-create-lgk8x"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.088105 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-kc8kc"]
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.107421 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.118801 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-m9rhm"]
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.129684 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69335c29-7c9c-438d-ac8d-85141a4f9bb5-config-data\") pod \"ceilometer-0\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " pod="openstack/ceilometer-0"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.129769 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69335c29-7c9c-438d-ac8d-85141a4f9bb5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " pod="openstack/ceilometer-0"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.129883 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vf5l\" (UniqueName: \"kubernetes.io/projected/d92acdcc-36a4-41c7-bf03-f60966090662-kube-api-access-4vf5l\") pod \"ironic-8dc3-account-create-update-9m9lm\" (UID: \"d92acdcc-36a4-41c7-bf03-f60966090662\") " pod="openstack/ironic-8dc3-account-create-update-9m9lm"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.130091 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50879d46-58dc-4716-89fd-bc68eea3bd2e-combined-ca-bundle\") pod \"barbican-db-sync-4dkm9\" (UID: \"50879d46-58dc-4716-89fd-bc68eea3bd2e\") " pod="openstack/barbican-db-sync-4dkm9"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.130129 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/50879d46-58dc-4716-89fd-bc68eea3bd2e-db-sync-config-data\") pod \"barbican-db-sync-4dkm9\" (UID: \"50879d46-58dc-4716-89fd-bc68eea3bd2e\") " pod="openstack/barbican-db-sync-4dkm9"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.130155 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/69335c29-7c9c-438d-ac8d-85141a4f9bb5-run-httpd\") pod \"ceilometer-0\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " pod="openstack/ceilometer-0"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.130174 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxmgv\" (UniqueName: \"kubernetes.io/projected/69335c29-7c9c-438d-ac8d-85141a4f9bb5-kube-api-access-pxmgv\") pod \"ceilometer-0\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " pod="openstack/ceilometer-0"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.130225 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d92acdcc-36a4-41c7-bf03-f60966090662-operator-scripts\") pod \"ironic-8dc3-account-create-update-9m9lm\" (UID: \"d92acdcc-36a4-41c7-bf03-f60966090662\") " pod="openstack/ironic-8dc3-account-create-update-9m9lm"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.130250 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/69335c29-7c9c-438d-ac8d-85141a4f9bb5-log-httpd\") pod \"ceilometer-0\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " pod="openstack/ceilometer-0"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.130276 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/69335c29-7c9c-438d-ac8d-85141a4f9bb5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " pod="openstack/ceilometer-0"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.130301 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/69335c29-7c9c-438d-ac8d-85141a4f9bb5-scripts\") pod \"ceilometer-0\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " pod="openstack/ceilometer-0"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.130327 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwvnq\" (UniqueName: \"kubernetes.io/projected/50879d46-58dc-4716-89fd-bc68eea3bd2e-kube-api-access-cwvnq\") pod \"barbican-db-sync-4dkm9\" (UID: \"50879d46-58dc-4716-89fd-bc68eea3bd2e\") " pod="openstack/barbican-db-sync-4dkm9"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.134906 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-4npgg"]
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.140594 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.184739 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-4npgg"]
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.216989 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-swztr"]
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.218268 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-swztr"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.223298 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.223325 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.223538 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-rdxct"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.232890 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69335c29-7c9c-438d-ac8d-85141a4f9bb5-config-data\") pod \"ceilometer-0\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " pod="openstack/ceilometer-0"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.232955 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttqzg\" (UniqueName: \"kubernetes.io/projected/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-kube-api-access-ttqzg\") pod \"dnsmasq-dns-58dd9ff6bc-4npgg\" (UID: \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.232984 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69335c29-7c9c-438d-ac8d-85141a4f9bb5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " pod="openstack/ceilometer-0"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.233015 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vf5l\" (UniqueName: \"kubernetes.io/projected/d92acdcc-36a4-41c7-bf03-f60966090662-kube-api-access-4vf5l\") pod \"ironic-8dc3-account-create-update-9m9lm\" (UID: \"d92acdcc-36a4-41c7-bf03-f60966090662\") " pod="openstack/ironic-8dc3-account-create-update-9m9lm"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.233069 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-dns-swift-storage-0\") pod \"dnsmasq-dns-58dd9ff6bc-4npgg\" (UID: \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.233102 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rc4tl\" (UniqueName: \"kubernetes.io/projected/dc01012d-a1bc-4849-8cc3-c0b3fc3f5504-kube-api-access-rc4tl\") pod \"neutron-db-sync-m9rhm\" (UID: \"dc01012d-a1bc-4849-8cc3-c0b3fc3f5504\") " pod="openstack/neutron-db-sync-m9rhm"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.233126 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-ovsdbserver-sb\") pod \"dnsmasq-dns-58dd9ff6bc-4npgg\" (UID: \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.233156 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50879d46-58dc-4716-89fd-bc68eea3bd2e-combined-ca-bundle\") pod \"barbican-db-sync-4dkm9\" (UID: \"50879d46-58dc-4716-89fd-bc68eea3bd2e\") " pod="openstack/barbican-db-sync-4dkm9"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.233183 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-ovsdbserver-nb\") pod \"dnsmasq-dns-58dd9ff6bc-4npgg\" (UID: \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.233209 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxmgv\" (UniqueName: \"kubernetes.io/projected/69335c29-7c9c-438d-ac8d-85141a4f9bb5-kube-api-access-pxmgv\") pod \"ceilometer-0\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " pod="openstack/ceilometer-0"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.233232 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/50879d46-58dc-4716-89fd-bc68eea3bd2e-db-sync-config-data\") pod \"barbican-db-sync-4dkm9\" (UID: \"50879d46-58dc-4716-89fd-bc68eea3bd2e\") " pod="openstack/barbican-db-sync-4dkm9"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.233253 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/69335c29-7c9c-438d-ac8d-85141a4f9bb5-run-httpd\") pod \"ceilometer-0\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " pod="openstack/ceilometer-0"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.233274 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-config\") pod \"dnsmasq-dns-58dd9ff6bc-4npgg\" (UID: \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.233303 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-dns-svc\") pod \"dnsmasq-dns-58dd9ff6bc-4npgg\" (UID: \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.233336 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d92acdcc-36a4-41c7-bf03-f60966090662-operator-scripts\") pod \"ironic-8dc3-account-create-update-9m9lm\" (UID: \"d92acdcc-36a4-41c7-bf03-f60966090662\") " pod="openstack/ironic-8dc3-account-create-update-9m9lm"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.233358 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/69335c29-7c9c-438d-ac8d-85141a4f9bb5-log-httpd\") pod \"ceilometer-0\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " pod="openstack/ceilometer-0"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.233384 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/69335c29-7c9c-438d-ac8d-85141a4f9bb5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " pod="openstack/ceilometer-0"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.233413 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/69335c29-7c9c-438d-ac8d-85141a4f9bb5-scripts\") pod \"ceilometer-0\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " pod="openstack/ceilometer-0"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.233434 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwvnq\" (UniqueName: \"kubernetes.io/projected/50879d46-58dc-4716-89fd-bc68eea3bd2e-kube-api-access-cwvnq\") pod \"barbican-db-sync-4dkm9\" (UID: \"50879d46-58dc-4716-89fd-bc68eea3bd2e\") " pod="openstack/barbican-db-sync-4dkm9"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.233457 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc01012d-a1bc-4849-8cc3-c0b3fc3f5504-combined-ca-bundle\") pod \"neutron-db-sync-m9rhm\" (UID: \"dc01012d-a1bc-4849-8cc3-c0b3fc3f5504\") " pod="openstack/neutron-db-sync-m9rhm"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.233507 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dc01012d-a1bc-4849-8cc3-c0b3fc3f5504-config\") pod \"neutron-db-sync-m9rhm\" (UID: \"dc01012d-a1bc-4849-8cc3-c0b3fc3f5504\") " pod="openstack/neutron-db-sync-m9rhm"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.237112 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-swztr"]
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.238405 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/69335c29-7c9c-438d-ac8d-85141a4f9bb5-log-httpd\") pod \"ceilometer-0\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " pod="openstack/ceilometer-0"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.238700 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/69335c29-7c9c-438d-ac8d-85141a4f9bb5-run-httpd\") pod \"ceilometer-0\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " pod="openstack/ceilometer-0"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.240385 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d92acdcc-36a4-41c7-bf03-f60966090662-operator-scripts\") pod \"ironic-8dc3-account-create-update-9m9lm\" (UID: \"d92acdcc-36a4-41c7-bf03-f60966090662\") " pod="openstack/ironic-8dc3-account-create-update-9m9lm"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.242289 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69335c29-7c9c-438d-ac8d-85141a4f9bb5-config-data\") pod \"ceilometer-0\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " pod="openstack/ceilometer-0"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.248542 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50879d46-58dc-4716-89fd-bc68eea3bd2e-combined-ca-bundle\") pod \"barbican-db-sync-4dkm9\" (UID: \"50879d46-58dc-4716-89fd-bc68eea3bd2e\") " pod="openstack/barbican-db-sync-4dkm9"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.248557 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69335c29-7c9c-438d-ac8d-85141a4f9bb5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " pod="openstack/ceilometer-0"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.254117 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/69335c29-7c9c-438d-ac8d-85141a4f9bb5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " pod="openstack/ceilometer-0"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.257473 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/50879d46-58dc-4716-89fd-bc68eea3bd2e-db-sync-config-data\") pod \"barbican-db-sync-4dkm9\" (UID: \"50879d46-58dc-4716-89fd-bc68eea3bd2e\") " pod="openstack/barbican-db-sync-4dkm9"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.260026 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/69335c29-7c9c-438d-ac8d-85141a4f9bb5-scripts\") pod \"ceilometer-0\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " pod="openstack/ceilometer-0"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.264859 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwvnq\" (UniqueName: \"kubernetes.io/projected/50879d46-58dc-4716-89fd-bc68eea3bd2e-kube-api-access-cwvnq\") pod \"barbican-db-sync-4dkm9\" (UID: \"50879d46-58dc-4716-89fd-bc68eea3bd2e\") " pod="openstack/barbican-db-sync-4dkm9"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.273642 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vf5l\" (UniqueName: \"kubernetes.io/projected/d92acdcc-36a4-41c7-bf03-f60966090662-kube-api-access-4vf5l\") pod \"ironic-8dc3-account-create-update-9m9lm\" (UID: \"d92acdcc-36a4-41c7-bf03-f60966090662\") " pod="openstack/ironic-8dc3-account-create-update-9m9lm"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.275135 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxmgv\" (UniqueName: \"kubernetes.io/projected/69335c29-7c9c-438d-ac8d-85141a4f9bb5-kube-api-access-pxmgv\") pod \"ceilometer-0\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " pod="openstack/ceilometer-0"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.297059 4838 generic.go:334] "Generic (PLEG): container finished" podID="04c9c0f3-24b8-4590-902e-bdd94bb01c25" containerID="7ac670e2992f3818d1f1a3337795975b221121e28ff0dd1e1f5ed1b65802cccd" exitCode=0
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.297104 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-svlph" event={"ID":"04c9c0f3-24b8-4590-902e-bdd94bb01c25","Type":"ContainerDied","Data":"7ac670e2992f3818d1f1a3337795975b221121e28ff0dd1e1f5ed1b65802cccd"}
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.305314 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-5jll9"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.334831 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-config\") pod \"dnsmasq-dns-58dd9ff6bc-4npgg\" (UID: \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.334873 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-dns-svc\") pod \"dnsmasq-dns-58dd9ff6bc-4npgg\" (UID: \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.334912 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc01012d-a1bc-4849-8cc3-c0b3fc3f5504-combined-ca-bundle\") pod \"neutron-db-sync-m9rhm\" (UID: \"dc01012d-a1bc-4849-8cc3-c0b3fc3f5504\") " pod="openstack/neutron-db-sync-m9rhm"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.334942 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c1f48f2-93aa-4b92-a289-7869c1993629-config-data\") pod \"placement-db-sync-swztr\" (UID: \"3c1f48f2-93aa-4b92-a289-7869c1993629\") " pod="openstack/placement-db-sync-swztr"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.334966 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dc01012d-a1bc-4849-8cc3-c0b3fc3f5504-config\") pod \"neutron-db-sync-m9rhm\" (UID: \"dc01012d-a1bc-4849-8cc3-c0b3fc3f5504\") " pod="openstack/neutron-db-sync-m9rhm"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.334986 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c1f48f2-93aa-4b92-a289-7869c1993629-combined-ca-bundle\") pod \"placement-db-sync-swztr\" (UID: \"3c1f48f2-93aa-4b92-a289-7869c1993629\") " pod="openstack/placement-db-sync-swztr"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.335016 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c1f48f2-93aa-4b92-a289-7869c1993629-logs\") pod \"placement-db-sync-swztr\" (UID: \"3c1f48f2-93aa-4b92-a289-7869c1993629\") " pod="openstack/placement-db-sync-swztr"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.335041 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttqzg\" (UniqueName: \"kubernetes.io/projected/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-kube-api-access-ttqzg\") pod \"dnsmasq-dns-58dd9ff6bc-4npgg\" (UID: \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.335087 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-dns-swift-storage-0\") pod \"dnsmasq-dns-58dd9ff6bc-4npgg\" (UID: \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.335110 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rc4tl\" (UniqueName: \"kubernetes.io/projected/dc01012d-a1bc-4849-8cc3-c0b3fc3f5504-kube-api-access-rc4tl\") pod \"neutron-db-sync-m9rhm\" (UID: \"dc01012d-a1bc-4849-8cc3-c0b3fc3f5504\") " pod="openstack/neutron-db-sync-m9rhm"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.335125 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-ovsdbserver-sb\") pod \"dnsmasq-dns-58dd9ff6bc-4npgg\" (UID: \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.335142 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3c1f48f2-93aa-4b92-a289-7869c1993629-scripts\") pod \"placement-db-sync-swztr\" (UID: \"3c1f48f2-93aa-4b92-a289-7869c1993629\") " pod="openstack/placement-db-sync-swztr"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.335159 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8pwx\" (UniqueName: \"kubernetes.io/projected/3c1f48f2-93aa-4b92-a289-7869c1993629-kube-api-access-x8pwx\") pod \"placement-db-sync-swztr\" (UID: \"3c1f48f2-93aa-4b92-a289-7869c1993629\") " pod="openstack/placement-db-sync-swztr"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.335177 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-ovsdbserver-nb\") pod \"dnsmasq-dns-58dd9ff6bc-4npgg\" (UID: \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.335955 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-ovsdbserver-nb\") pod \"dnsmasq-dns-58dd9ff6bc-4npgg\" (UID: \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.347129 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-8dc3-account-create-update-9m9lm"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.348218 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-config\") pod \"dnsmasq-dns-58dd9ff6bc-4npgg\" (UID: \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.348716 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-dns-svc\") pod \"dnsmasq-dns-58dd9ff6bc-4npgg\" (UID: \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.349840 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-dns-swift-storage-0\") pod \"dnsmasq-dns-58dd9ff6bc-4npgg\" (UID: \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.351344 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-ovsdbserver-sb\") pod \"dnsmasq-dns-58dd9ff6bc-4npgg\" (UID: \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.355807 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-4dkm9"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.356118 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/dc01012d-a1bc-4849-8cc3-c0b3fc3f5504-config\") pod \"neutron-db-sync-m9rhm\" (UID: \"dc01012d-a1bc-4849-8cc3-c0b3fc3f5504\") " pod="openstack/neutron-db-sync-m9rhm"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.368051 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc01012d-a1bc-4849-8cc3-c0b3fc3f5504-combined-ca-bundle\") pod \"neutron-db-sync-m9rhm\" (UID: \"dc01012d-a1bc-4849-8cc3-c0b3fc3f5504\") " pod="openstack/neutron-db-sync-m9rhm"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.377419 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttqzg\" (UniqueName: \"kubernetes.io/projected/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-kube-api-access-ttqzg\") pod \"dnsmasq-dns-58dd9ff6bc-4npgg\" (UID: \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.377573 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rc4tl\" (UniqueName: \"kubernetes.io/projected/dc01012d-a1bc-4849-8cc3-c0b3fc3f5504-kube-api-access-rc4tl\") pod \"neutron-db-sync-m9rhm\" (UID: \"dc01012d-a1bc-4849-8cc3-c0b3fc3f5504\") " pod="openstack/neutron-db-sync-m9rhm"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.383499 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.401045 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-m9rhm"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.429645 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.429711 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.438864 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3c1f48f2-93aa-4b92-a289-7869c1993629-scripts\") pod \"placement-db-sync-swztr\" (UID: \"3c1f48f2-93aa-4b92-a289-7869c1993629\") " pod="openstack/placement-db-sync-swztr"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.438899 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8pwx\" (UniqueName: \"kubernetes.io/projected/3c1f48f2-93aa-4b92-a289-7869c1993629-kube-api-access-x8pwx\") pod \"placement-db-sync-swztr\" (UID: \"3c1f48f2-93aa-4b92-a289-7869c1993629\") " pod="openstack/placement-db-sync-swztr"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.438960 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c1f48f2-93aa-4b92-a289-7869c1993629-config-data\") pod \"placement-db-sync-swztr\" (UID: \"3c1f48f2-93aa-4b92-a289-7869c1993629\") " pod="openstack/placement-db-sync-swztr"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.438994 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c1f48f2-93aa-4b92-a289-7869c1993629-combined-ca-bundle\") pod \"placement-db-sync-swztr\" (UID: \"3c1f48f2-93aa-4b92-a289-7869c1993629\") " pod="openstack/placement-db-sync-swztr"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.439022 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c1f48f2-93aa-4b92-a289-7869c1993629-logs\") pod \"placement-db-sync-swztr\" (UID: \"3c1f48f2-93aa-4b92-a289-7869c1993629\") " pod="openstack/placement-db-sync-swztr"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.439468 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c1f48f2-93aa-4b92-a289-7869c1993629-logs\") pod \"placement-db-sync-swztr\" (UID: \"3c1f48f2-93aa-4b92-a289-7869c1993629\") " pod="openstack/placement-db-sync-swztr"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.443351 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3c1f48f2-93aa-4b92-a289-7869c1993629-scripts\") pod \"placement-db-sync-swztr\" (UID: \"3c1f48f2-93aa-4b92-a289-7869c1993629\") " pod="openstack/placement-db-sync-swztr"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.447270 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c1f48f2-93aa-4b92-a289-7869c1993629-config-data\") pod \"placement-db-sync-swztr\" (UID: \"3c1f48f2-93aa-4b92-a289-7869c1993629\") " pod="openstack/placement-db-sync-swztr"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.449637 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c1f48f2-93aa-4b92-a289-7869c1993629-combined-ca-bundle\") pod \"placement-db-sync-swztr\" (UID: \"3c1f48f2-93aa-4b92-a289-7869c1993629\") " pod="openstack/placement-db-sync-swztr"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.456740 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8pwx\" (UniqueName: \"kubernetes.io/projected/3c1f48f2-93aa-4b92-a289-7869c1993629-kube-api-access-x8pwx\") pod \"placement-db-sync-swztr\" (UID: \"3c1f48f2-93aa-4b92-a289-7869c1993629\") " pod="openstack/placement-db-sync-swztr"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.464468 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-svlph"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.483323 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.545756 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-swztr"
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.641646 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wsgx2\" (UniqueName: \"kubernetes.io/projected/04c9c0f3-24b8-4590-902e-bdd94bb01c25-kube-api-access-wsgx2\") pod \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\" (UID: \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\") "
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.642057 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-dns-swift-storage-0\") pod \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\" (UID: \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\") "
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.642098 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-ovsdbserver-nb\") pod \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\" (UID: \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\") "
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.642136 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-ovsdbserver-sb\") pod \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\" (UID: \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\") "
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.642167 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-config\") pod \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\" (UID: \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\") "
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.642202 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-dns-svc\") pod \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\" (UID: \"04c9c0f3-24b8-4590-902e-bdd94bb01c25\") "
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.661964 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-b8d7k"]
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.690538 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-kc8kc"]
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.692159 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04c9c0f3-24b8-4590-902e-bdd94bb01c25-kube-api-access-wsgx2" (OuterVolumeSpecName: "kube-api-access-wsgx2") pod "04c9c0f3-24b8-4590-902e-bdd94bb01c25" (UID: "04c9c0f3-24b8-4590-902e-bdd94bb01c25"). InnerVolumeSpecName "kube-api-access-wsgx2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.749743 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wsgx2\" (UniqueName: \"kubernetes.io/projected/04c9c0f3-24b8-4590-902e-bdd94bb01c25-kube-api-access-wsgx2\") on node \"crc\" DevicePath \"\""
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.756070 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-config" (OuterVolumeSpecName: "config") pod "04c9c0f3-24b8-4590-902e-bdd94bb01c25" (UID: "04c9c0f3-24b8-4590-902e-bdd94bb01c25"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.774566 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "04c9c0f3-24b8-4590-902e-bdd94bb01c25" (UID: "04c9c0f3-24b8-4590-902e-bdd94bb01c25"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.789608 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "04c9c0f3-24b8-4590-902e-bdd94bb01c25" (UID: "04c9c0f3-24b8-4590-902e-bdd94bb01c25"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.821233 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "04c9c0f3-24b8-4590-902e-bdd94bb01c25" (UID: "04c9c0f3-24b8-4590-902e-bdd94bb01c25"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.821267 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "04c9c0f3-24b8-4590-902e-bdd94bb01c25" (UID: "04c9c0f3-24b8-4590-902e-bdd94bb01c25"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.851157 4838 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.851434 4838 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.851754 4838 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.851771 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-config\") on node \"crc\" DevicePath \"\""
Feb 02 11:12:45 crc kubenswrapper[4838]: I0202 11:12:45.851779 4838 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/04c9c0f3-24b8-4590-902e-bdd94bb01c25-dns-svc\") on node \"crc\" DevicePath \"\""
Feb 02 11:12:46 crc kubenswrapper[4838]: I0202 11:12:46.012367 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-db-create-lgk8x"]
Feb 02 11:12:46 crc kubenswrapper[4838]: I0202 11:12:46.201636 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-5jll9"]
Feb 02 11:12:46 crc kubenswrapper[4838]: W0202 11:12:46.232832 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod12da676a_3c0b_4e05_996b_6f0b859d99e3.slice/crio-a43cba69155e67a632d44189c74138934d004e5eb27583ed8688ac413fe6d55d WatchSource:0}: Error finding container a43cba69155e67a632d44189c74138934d004e5eb27583ed8688ac413fe6d55d: Status 404 returned error can't find the container with id a43cba69155e67a632d44189c74138934d004e5eb27583ed8688ac413fe6d55d
Feb 02 11:12:46 crc kubenswrapper[4838]: I0202 11:12:46.317127 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-db-create-lgk8x" event={"ID":"989df2c8-4c47-4998-a79d-967c08df69fb","Type":"ContainerStarted","Data":"6825ad099b5e8304657f87e90a992d417acaf7be2e2f5e130c86153f48df3d5e"}
Feb 02 11:12:46 crc kubenswrapper[4838]: I0202 11:12:46.326812 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959f8865f-kc8kc" event={"ID":"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d","Type":"ContainerStarted","Data":"13432d7e69b4a359994cc3ae3398f48342ed3a422d0648e8fc9c6bb7b9055a68"}
Feb 02 11:12:46 crc kubenswrapper[4838]: I0202 11:12:46.326856 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959f8865f-kc8kc" event={"ID":"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d","Type":"ContainerStarted","Data":"13ce7818b75d3a03827f48885a6184ed932d45f3ea3cb4104b27dcca04b87ccb"}
Feb 02 11:12:46 crc kubenswrapper[4838]: I0202 11:12:46.328661 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-svlph" event={"ID":"04c9c0f3-24b8-4590-902e-bdd94bb01c25","Type":"ContainerDied","Data":"83f25f019de6aa083b3cd3202d9f7507691d16249fd8597889ce6d71ac2da559"}
scope.go:117] "RemoveContainer" containerID="7ac670e2992f3818d1f1a3337795975b221121e28ff0dd1e1f5ed1b65802cccd" Feb 02 11:12:46 crc kubenswrapper[4838]: I0202 11:12:46.328865 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-svlph" Feb 02 11:12:46 crc kubenswrapper[4838]: I0202 11:12:46.331548 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-b8d7k" event={"ID":"29c1c83f-d3d6-47fc-a81b-3fd664ab326d","Type":"ContainerStarted","Data":"f91b17c684b4d6b40ea3b722d0347894cf17236a8a8e438b6bad184eea6e2c16"} Feb 02 11:12:46 crc kubenswrapper[4838]: I0202 11:12:46.334046 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-5jll9" event={"ID":"12da676a-3c0b-4e05-996b-6f0b859d99e3","Type":"ContainerStarted","Data":"a43cba69155e67a632d44189c74138934d004e5eb27583ed8688ac413fe6d55d"} Feb 02 11:12:46 crc kubenswrapper[4838]: I0202 11:12:46.353630 4838 scope.go:117] "RemoveContainer" containerID="a69ce1656dd4a0b000efef4a6ead2f328727fffa45e1beb50eebdbe565c8ccfe" Feb 02 11:12:46 crc kubenswrapper[4838]: I0202 11:12:46.366349 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-svlph"] Feb 02 11:12:46 crc kubenswrapper[4838]: I0202 11:12:46.374592 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-svlph"] Feb 02 11:12:46 crc kubenswrapper[4838]: I0202 11:12:46.463921 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-m9rhm"] Feb 02 11:12:46 crc kubenswrapper[4838]: I0202 11:12:46.473345 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-swztr"] Feb 02 11:12:46 crc kubenswrapper[4838]: I0202 11:12:46.486652 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:12:46 crc kubenswrapper[4838]: I0202 11:12:46.496779 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-8dc3-account-create-update-9m9lm"] Feb 02 11:12:46 crc kubenswrapper[4838]: W0202 11:12:46.501096 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod69335c29_7c9c_438d_ac8d_85141a4f9bb5.slice/crio-bcbfe5646f1ec57fe89c0d9b1cb903ee218b20e43e295f0014c611d7df243b3c WatchSource:0}: Error finding container bcbfe5646f1ec57fe89c0d9b1cb903ee218b20e43e295f0014c611d7df243b3c: Status 404 returned error can't find the container with id bcbfe5646f1ec57fe89c0d9b1cb903ee218b20e43e295f0014c611d7df243b3c Feb 02 11:12:46 crc kubenswrapper[4838]: W0202 11:12:46.516536 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50879d46_58dc_4716_89fd_bc68eea3bd2e.slice/crio-1527b659f79909f196019e151677f9611a1d0cce700721fe1c3d76bdec3141f2 WatchSource:0}: Error finding container 1527b659f79909f196019e151677f9611a1d0cce700721fe1c3d76bdec3141f2: Status 404 returned error can't find the container with id 1527b659f79909f196019e151677f9611a1d0cce700721fe1c3d76bdec3141f2 Feb 02 11:12:46 crc kubenswrapper[4838]: I0202 11:12:46.526337 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04c9c0f3-24b8-4590-902e-bdd94bb01c25" path="/var/lib/kubelet/pods/04c9c0f3-24b8-4590-902e-bdd94bb01c25/volumes" Feb 02 11:12:46 crc kubenswrapper[4838]: I0202 11:12:46.526984 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-4dkm9"] 
Feb 02 11:12:46 crc kubenswrapper[4838]: I0202 11:12:46.527794 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-4npgg"] Feb 02 11:12:46 crc kubenswrapper[4838]: I0202 11:12:46.857868 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:12:47 crc kubenswrapper[4838]: I0202 11:12:47.342316 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-db-create-lgk8x" event={"ID":"989df2c8-4c47-4998-a79d-967c08df69fb","Type":"ContainerStarted","Data":"f5df467f9b73675d5db53bb9ebb98ad3fa8454a7cff6f58b3e4f4ec825a5b01c"} Feb 02 11:12:47 crc kubenswrapper[4838]: I0202 11:12:47.345227 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-b8d7k" event={"ID":"29c1c83f-d3d6-47fc-a81b-3fd664ab326d","Type":"ContainerStarted","Data":"8b0bd4e83d9d936d1c40703a20fe501e869ef58a30c1d3e654ade208f5d2fa7f"} Feb 02 11:12:47 crc kubenswrapper[4838]: I0202 11:12:47.346490 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-swztr" event={"ID":"3c1f48f2-93aa-4b92-a289-7869c1993629","Type":"ContainerStarted","Data":"cd61b098d27d82d5d0c274a15237eb3468017f96ebcca6faabe6a4f08ad3e755"} Feb 02 11:12:47 crc kubenswrapper[4838]: I0202 11:12:47.347607 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-8dc3-account-create-update-9m9lm" event={"ID":"d92acdcc-36a4-41c7-bf03-f60966090662","Type":"ContainerStarted","Data":"0a41fdc7ec7ee8815bede0de87adfe7270ad9c2da1d20abe50a6d1ee3d240d24"} Feb 02 11:12:47 crc kubenswrapper[4838]: I0202 11:12:47.349335 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-m9rhm" event={"ID":"dc01012d-a1bc-4849-8cc3-c0b3fc3f5504","Type":"ContainerStarted","Data":"1d5cbeee6e023cbfb8e09f59ee31635ff2d97ec33d9dd5982af2a2591a8392af"} Feb 02 11:12:47 crc kubenswrapper[4838]: I0202 11:12:47.349378 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-m9rhm" event={"ID":"dc01012d-a1bc-4849-8cc3-c0b3fc3f5504","Type":"ContainerStarted","Data":"5663f3695d4e9a71d5a80c48ddfdc6aee679f359a6ec4c4747b39864198af827"} Feb 02 11:12:47 crc kubenswrapper[4838]: I0202 11:12:47.351376 4838 generic.go:334] "Generic (PLEG): container finished" podID="1e1f1c2e-fb1a-41bc-9815-5e2546c8640d" containerID="13432d7e69b4a359994cc3ae3398f48342ed3a422d0648e8fc9c6bb7b9055a68" exitCode=0 Feb 02 11:12:47 crc kubenswrapper[4838]: I0202 11:12:47.351439 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959f8865f-kc8kc" event={"ID":"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d","Type":"ContainerDied","Data":"13432d7e69b4a359994cc3ae3398f48342ed3a422d0648e8fc9c6bb7b9055a68"} Feb 02 11:12:47 crc kubenswrapper[4838]: I0202 11:12:47.361965 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg" event={"ID":"11d5db5b-3fc8-41a3-b60d-69391b17f4f5","Type":"ContainerStarted","Data":"95702c7397a388b0f84882fd20c58fd1eb623157ed2dd90f2903f0f5327f1800"} Feb 02 11:12:47 crc kubenswrapper[4838]: I0202 11:12:47.363981 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-4dkm9" event={"ID":"50879d46-58dc-4716-89fd-bc68eea3bd2e","Type":"ContainerStarted","Data":"1527b659f79909f196019e151677f9611a1d0cce700721fe1c3d76bdec3141f2"} Feb 02 11:12:47 crc kubenswrapper[4838]: I0202 11:12:47.365261 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"69335c29-7c9c-438d-ac8d-85141a4f9bb5","Type":"ContainerStarted","Data":"bcbfe5646f1ec57fe89c0d9b1cb903ee218b20e43e295f0014c611d7df243b3c"} Feb 02 11:12:47 crc kubenswrapper[4838]: I0202 11:12:47.367183 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ironic-db-create-lgk8x" podStartSLOduration=3.367162997 podStartE2EDuration="3.367162997s" podCreationTimestamp="2026-02-02 11:12:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:12:47.355023997 +0000 UTC m=+1161.692125035" watchObservedRunningTime="2026-02-02 11:12:47.367162997 +0000 UTC m=+1161.704264025" Feb 02 11:12:47 crc kubenswrapper[4838]: I0202 11:12:47.403649 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-b8d7k" podStartSLOduration=3.403631379 podStartE2EDuration="3.403631379s" podCreationTimestamp="2026-02-02 11:12:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:12:47.398870893 +0000 UTC m=+1161.735971921" watchObservedRunningTime="2026-02-02 11:12:47.403631379 +0000 UTC m=+1161.740732417" Feb 02 11:12:47 crc kubenswrapper[4838]: I0202 11:12:47.867082 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-kc8kc" Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.001517 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-config\") pod \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\" (UID: \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\") " Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.001597 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-dns-svc\") pod \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\" (UID: \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\") " Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.001728 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-dns-swift-storage-0\") pod \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\" (UID: \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\") " Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.001816 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-ovsdbserver-nb\") pod \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\" (UID: \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\") " Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.001863 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rj2g2\" (UniqueName: \"kubernetes.io/projected/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-kube-api-access-rj2g2\") pod \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\" (UID: \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\") " Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.001942 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-ovsdbserver-sb\") pod \"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\" (UID: 
\"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d\") " Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.009841 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-kube-api-access-rj2g2" (OuterVolumeSpecName: "kube-api-access-rj2g2") pod "1e1f1c2e-fb1a-41bc-9815-5e2546c8640d" (UID: "1e1f1c2e-fb1a-41bc-9815-5e2546c8640d"). InnerVolumeSpecName "kube-api-access-rj2g2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.023953 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-config" (OuterVolumeSpecName: "config") pod "1e1f1c2e-fb1a-41bc-9815-5e2546c8640d" (UID: "1e1f1c2e-fb1a-41bc-9815-5e2546c8640d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.026800 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1e1f1c2e-fb1a-41bc-9815-5e2546c8640d" (UID: "1e1f1c2e-fb1a-41bc-9815-5e2546c8640d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.028522 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1e1f1c2e-fb1a-41bc-9815-5e2546c8640d" (UID: "1e1f1c2e-fb1a-41bc-9815-5e2546c8640d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.029488 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "1e1f1c2e-fb1a-41bc-9815-5e2546c8640d" (UID: "1e1f1c2e-fb1a-41bc-9815-5e2546c8640d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.046056 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1e1f1c2e-fb1a-41bc-9815-5e2546c8640d" (UID: "1e1f1c2e-fb1a-41bc-9815-5e2546c8640d"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.105537 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-config\") on node \"crc\" DevicePath \"\"" Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.105566 4838 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.105576 4838 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.105589 4838 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.105599 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rj2g2\" (UniqueName: \"kubernetes.io/projected/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-kube-api-access-rj2g2\") on node \"crc\" DevicePath \"\"" Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.105608 4838 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.381556 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg" event={"ID":"11d5db5b-3fc8-41a3-b60d-69391b17f4f5","Type":"ContainerStarted","Data":"909195a472f294640b13ec43b6c1a4446157fb76ee0bc42d640f3a0f126f57c0"} Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.384075 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-8dc3-account-create-update-9m9lm" event={"ID":"d92acdcc-36a4-41c7-bf03-f60966090662","Type":"ContainerStarted","Data":"dd4e75cf1a445ce358dd00abea8be2a1f931d55aa1b063cd59b261aced1c7f73"} Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.387194 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-kc8kc" Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.388289 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959f8865f-kc8kc" event={"ID":"1e1f1c2e-fb1a-41bc-9815-5e2546c8640d","Type":"ContainerDied","Data":"13ce7818b75d3a03827f48885a6184ed932d45f3ea3cb4104b27dcca04b87ccb"} Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.388330 4838 scope.go:117] "RemoveContainer" containerID="13432d7e69b4a359994cc3ae3398f48342ed3a422d0648e8fc9c6bb7b9055a68" Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.427900 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-m9rhm" podStartSLOduration=4.427881685 podStartE2EDuration="4.427881685s" podCreationTimestamp="2026-02-02 11:12:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:12:48.420334336 +0000 UTC m=+1162.757435404" watchObservedRunningTime="2026-02-02 11:12:48.427881685 +0000 UTC m=+1162.764982713" Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.452195 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ironic-8dc3-account-create-update-9m9lm" podStartSLOduration=4.452178996 podStartE2EDuration="4.452178996s" podCreationTimestamp="2026-02-02 11:12:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:12:48.44624943 +0000 UTC m=+1162.783350498" watchObservedRunningTime="2026-02-02 11:12:48.452178996 +0000 UTC m=+1162.789280014" Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.521538 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-kc8kc"] Feb 02 11:12:48 crc kubenswrapper[4838]: I0202 11:12:48.521582 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-kc8kc"] Feb 02 11:12:49 crc kubenswrapper[4838]: I0202 11:12:49.397765 4838 generic.go:334] "Generic (PLEG): container finished" podID="11d5db5b-3fc8-41a3-b60d-69391b17f4f5" containerID="909195a472f294640b13ec43b6c1a4446157fb76ee0bc42d640f3a0f126f57c0" exitCode=0 Feb 02 11:12:49 crc kubenswrapper[4838]: I0202 11:12:49.397829 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg" event={"ID":"11d5db5b-3fc8-41a3-b60d-69391b17f4f5","Type":"ContainerDied","Data":"909195a472f294640b13ec43b6c1a4446157fb76ee0bc42d640f3a0f126f57c0"} Feb 02 11:12:50 crc kubenswrapper[4838]: I0202 11:12:50.523020 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e1f1c2e-fb1a-41bc-9815-5e2546c8640d" path="/var/lib/kubelet/pods/1e1f1c2e-fb1a-41bc-9815-5e2546c8640d/volumes" Feb 02 11:12:52 crc kubenswrapper[4838]: I0202 11:12:52.452595 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg" event={"ID":"11d5db5b-3fc8-41a3-b60d-69391b17f4f5","Type":"ContainerStarted","Data":"19d0d2218649d8d7f2f565c9f5139eb1acfb1ae0acae69a947e66543e1ce4dfd"} Feb 02 11:12:53 crc kubenswrapper[4838]: I0202 11:12:53.463772 4838 generic.go:334] "Generic (PLEG): container finished" podID="989df2c8-4c47-4998-a79d-967c08df69fb" containerID="f5df467f9b73675d5db53bb9ebb98ad3fa8454a7cff6f58b3e4f4ec825a5b01c" exitCode=0 Feb 02 11:12:53 crc kubenswrapper[4838]: I0202 11:12:53.463866 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ironic-db-create-lgk8x" event={"ID":"989df2c8-4c47-4998-a79d-967c08df69fb","Type":"ContainerDied","Data":"f5df467f9b73675d5db53bb9ebb98ad3fa8454a7cff6f58b3e4f4ec825a5b01c"} Feb 02 11:12:53 crc kubenswrapper[4838]: I0202 11:12:53.464022 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg" Feb 02 11:12:53 crc kubenswrapper[4838]: I0202 11:12:53.484533 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg" podStartSLOduration=9.484494799 podStartE2EDuration="9.484494799s" podCreationTimestamp="2026-02-02 11:12:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:12:53.482811864 +0000 UTC m=+1167.819912912" watchObservedRunningTime="2026-02-02 11:12:53.484494799 +0000 UTC m=+1167.821595827" Feb 02 11:13:00 crc kubenswrapper[4838]: I0202 11:13:00.487024 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg" Feb 02 11:13:00 crc kubenswrapper[4838]: I0202 11:13:00.563387 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-v77l6"] Feb 02 11:13:00 crc kubenswrapper[4838]: I0202 11:13:00.563726 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-v77l6" podUID="646dcf2a-3e0a-4470-8950-40543c320e6f" containerName="dnsmasq-dns" containerID="cri-o://9213026ee7acb986896eb0310d7f2e3cc1dd16e44607d8b4bbfe030fc2679c90" gracePeriod=10 Feb 02 11:13:01 crc kubenswrapper[4838]: I0202 11:13:01.562402 4838 generic.go:334] "Generic (PLEG): container finished" podID="646dcf2a-3e0a-4470-8950-40543c320e6f" containerID="9213026ee7acb986896eb0310d7f2e3cc1dd16e44607d8b4bbfe030fc2679c90" exitCode=0 Feb 02 11:13:01 crc kubenswrapper[4838]: I0202 11:13:01.562452 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-v77l6" event={"ID":"646dcf2a-3e0a-4470-8950-40543c320e6f","Type":"ContainerDied","Data":"9213026ee7acb986896eb0310d7f2e3cc1dd16e44607d8b4bbfe030fc2679c90"} Feb 02 11:13:03 crc kubenswrapper[4838]: E0202 11:13:03.432424 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Feb 02 11:13:03 crc kubenswrapper[4838]: E0202 11:13:03.433173 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db 
upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cwvnq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-4dkm9_openstack(50879d46-58dc-4716-89fd-bc68eea3bd2e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 11:13:03 crc kubenswrapper[4838]: E0202 11:13:03.434492 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-4dkm9" podUID="50879d46-58dc-4716-89fd-bc68eea3bd2e" Feb 02 11:13:03 crc kubenswrapper[4838]: I0202 11:13:03.505633 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-db-create-lgk8x" Feb 02 11:13:03 crc kubenswrapper[4838]: I0202 11:13:03.564341 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-v77l6" podUID="646dcf2a-3e0a-4470-8950-40543c320e6f" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.112:5353: connect: connection refused" Feb 02 11:13:03 crc kubenswrapper[4838]: I0202 11:13:03.588914 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-db-create-lgk8x" Feb 02 11:13:03 crc kubenswrapper[4838]: I0202 11:13:03.588913 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-db-create-lgk8x" event={"ID":"989df2c8-4c47-4998-a79d-967c08df69fb","Type":"ContainerDied","Data":"6825ad099b5e8304657f87e90a992d417acaf7be2e2f5e130c86153f48df3d5e"} Feb 02 11:13:03 crc kubenswrapper[4838]: I0202 11:13:03.589059 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6825ad099b5e8304657f87e90a992d417acaf7be2e2f5e130c86153f48df3d5e" Feb 02 11:13:03 crc kubenswrapper[4838]: I0202 11:13:03.594067 4838 generic.go:334] "Generic (PLEG): container finished" podID="d92acdcc-36a4-41c7-bf03-f60966090662" containerID="dd4e75cf1a445ce358dd00abea8be2a1f931d55aa1b063cd59b261aced1c7f73" exitCode=0 Feb 02 11:13:03 crc kubenswrapper[4838]: I0202 11:13:03.594105 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-8dc3-account-create-update-9m9lm" event={"ID":"d92acdcc-36a4-41c7-bf03-f60966090662","Type":"ContainerDied","Data":"dd4e75cf1a445ce358dd00abea8be2a1f931d55aa1b063cd59b261aced1c7f73"} Feb 02 11:13:03 crc kubenswrapper[4838]: E0202 11:13:03.597895 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-4dkm9" podUID="50879d46-58dc-4716-89fd-bc68eea3bd2e" Feb 02 11:13:03 crc kubenswrapper[4838]: I0202 11:13:03.675130 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/989df2c8-4c47-4998-a79d-967c08df69fb-operator-scripts\") pod \"989df2c8-4c47-4998-a79d-967c08df69fb\" (UID: \"989df2c8-4c47-4998-a79d-967c08df69fb\") " Feb 02 11:13:03 crc kubenswrapper[4838]: I0202 11:13:03.675187 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xct8m\" (UniqueName: \"kubernetes.io/projected/989df2c8-4c47-4998-a79d-967c08df69fb-kube-api-access-xct8m\") pod \"989df2c8-4c47-4998-a79d-967c08df69fb\" (UID: \"989df2c8-4c47-4998-a79d-967c08df69fb\") " Feb 02 11:13:03 crc kubenswrapper[4838]: I0202 11:13:03.676362 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/989df2c8-4c47-4998-a79d-967c08df69fb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "989df2c8-4c47-4998-a79d-967c08df69fb" (UID: "989df2c8-4c47-4998-a79d-967c08df69fb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:13:03 crc kubenswrapper[4838]: I0202 11:13:03.696519 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/989df2c8-4c47-4998-a79d-967c08df69fb-kube-api-access-xct8m" (OuterVolumeSpecName: "kube-api-access-xct8m") pod "989df2c8-4c47-4998-a79d-967c08df69fb" (UID: "989df2c8-4c47-4998-a79d-967c08df69fb"). InnerVolumeSpecName "kube-api-access-xct8m". 
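The barbican-db-sync entries above show the two stages of a failed image pull: the synchronous failure (ErrImagePull, here caused by a canceled CRI pull) and the subsequent retry state (ImagePullBackOff, "Back-off pulling image"). The kubelet retries failed pulls with exponential backoff, commonly described as roughly 10 seconds doubling up to a 5-minute cap. A small Go sketch of that backoff shape; the constants are illustrative, not read from this cluster's configuration:

package main

import (
	"fmt"
	"time"
)

func main() {
	const (
		initial  = 10 * time.Second // assumed base delay
		maxDelay = 5 * time.Minute  // assumed cap
	)
	delay := initial
	for attempt := 1; attempt <= 6; attempt++ {
		fmt.Printf("pull attempt %d failed; backing off %s\n", attempt, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}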
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:13:03 crc kubenswrapper[4838]: I0202 11:13:03.778902 4838 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/989df2c8-4c47-4998-a79d-967c08df69fb-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:13:03 crc kubenswrapper[4838]: I0202 11:13:03.779258 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xct8m\" (UniqueName: \"kubernetes.io/projected/989df2c8-4c47-4998-a79d-967c08df69fb-kube-api-access-xct8m\") on node \"crc\" DevicePath \"\"" Feb 02 11:13:05 crc kubenswrapper[4838]: I0202 11:13:05.615276 4838 generic.go:334] "Generic (PLEG): container finished" podID="29c1c83f-d3d6-47fc-a81b-3fd664ab326d" containerID="8b0bd4e83d9d936d1c40703a20fe501e869ef58a30c1d3e654ade208f5d2fa7f" exitCode=0 Feb 02 11:13:05 crc kubenswrapper[4838]: I0202 11:13:05.615545 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-b8d7k" event={"ID":"29c1c83f-d3d6-47fc-a81b-3fd664ab326d","Type":"ContainerDied","Data":"8b0bd4e83d9d936d1c40703a20fe501e869ef58a30c1d3e654ade208f5d2fa7f"} Feb 02 11:13:13 crc kubenswrapper[4838]: I0202 11:13:13.563854 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-v77l6" podUID="646dcf2a-3e0a-4470-8950-40543c320e6f" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.112:5353: i/o timeout" Feb 02 11:13:15 crc kubenswrapper[4838]: I0202 11:13:15.430259 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 11:13:15 crc kubenswrapper[4838]: I0202 11:13:15.430514 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 11:13:18 crc kubenswrapper[4838]: I0202 11:13:18.564773 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-v77l6" podUID="646dcf2a-3e0a-4470-8950-40543c320e6f" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.112:5353: i/o timeout" Feb 02 11:13:18 crc kubenswrapper[4838]: I0202 11:13:18.565304 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-v77l6" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.535015 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-v77l6" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.538290 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-b8d7k" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.544089 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-8dc3-account-create-update-9m9lm" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.706365 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ws6j\" (UniqueName: \"kubernetes.io/projected/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-kube-api-access-6ws6j\") pod \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\" (UID: \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\") " Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.706469 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d92acdcc-36a4-41c7-bf03-f60966090662-operator-scripts\") pod \"d92acdcc-36a4-41c7-bf03-f60966090662\" (UID: \"d92acdcc-36a4-41c7-bf03-f60966090662\") " Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.706495 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-scripts\") pod \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\" (UID: \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\") " Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.706531 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-credential-keys\") pod \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\" (UID: \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\") " Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.706562 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-fernet-keys\") pod \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\" (UID: \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\") " Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.706633 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/646dcf2a-3e0a-4470-8950-40543c320e6f-dns-svc\") pod \"646dcf2a-3e0a-4470-8950-40543c320e6f\" (UID: \"646dcf2a-3e0a-4470-8950-40543c320e6f\") " Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.706660 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/646dcf2a-3e0a-4470-8950-40543c320e6f-ovsdbserver-sb\") pod \"646dcf2a-3e0a-4470-8950-40543c320e6f\" (UID: \"646dcf2a-3e0a-4470-8950-40543c320e6f\") " Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.706744 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/646dcf2a-3e0a-4470-8950-40543c320e6f-config\") pod \"646dcf2a-3e0a-4470-8950-40543c320e6f\" (UID: \"646dcf2a-3e0a-4470-8950-40543c320e6f\") " Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.706766 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/646dcf2a-3e0a-4470-8950-40543c320e6f-ovsdbserver-nb\") pod \"646dcf2a-3e0a-4470-8950-40543c320e6f\" (UID: \"646dcf2a-3e0a-4470-8950-40543c320e6f\") " Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.706802 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vf5l\" (UniqueName: \"kubernetes.io/projected/d92acdcc-36a4-41c7-bf03-f60966090662-kube-api-access-4vf5l\") pod \"d92acdcc-36a4-41c7-bf03-f60966090662\" (UID: 
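The dnsmasq readiness probe failures above are TCP probes, and the two failure outputs in the log ("connect: connection refused" and later "i/o timeout") are the two usual ways a dial fails once the container behind the address is gone. At heart a TCP probe is a dial with a timeout; a minimal Go sketch, with the address copied from the log and a one-second timeout standing in for the probe's configured timeout (not taken from the pod spec):

package main

import (
	"fmt"
	"net"
	"time"
)

// probeTCP mirrors the shape of a TCP readiness check: success is simply
// being able to open (and close) a connection before the deadline.
func probeTCP(addr string, timeout time.Duration) error {
	conn, err := net.DialTimeout("tcp", addr, timeout)
	if err != nil {
		return err // e.g. "connect: connection refused" or "i/o timeout"
	}
	return conn.Close()
}

func main() {
	if err := probeTCP("10.217.0.112:5353", time.Second); err != nil {
		fmt.Println("probe failed:", err)
	}
}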
\"d92acdcc-36a4-41c7-bf03-f60966090662\") " Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.706831 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-combined-ca-bundle\") pod \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\" (UID: \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\") " Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.706863 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d7d8q\" (UniqueName: \"kubernetes.io/projected/646dcf2a-3e0a-4470-8950-40543c320e6f-kube-api-access-d7d8q\") pod \"646dcf2a-3e0a-4470-8950-40543c320e6f\" (UID: \"646dcf2a-3e0a-4470-8950-40543c320e6f\") " Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.706894 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-config-data\") pod \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\" (UID: \"29c1c83f-d3d6-47fc-a81b-3fd664ab326d\") " Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.707519 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d92acdcc-36a4-41c7-bf03-f60966090662-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d92acdcc-36a4-41c7-bf03-f60966090662" (UID: "d92acdcc-36a4-41c7-bf03-f60966090662"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.711439 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-kube-api-access-6ws6j" (OuterVolumeSpecName: "kube-api-access-6ws6j") pod "29c1c83f-d3d6-47fc-a81b-3fd664ab326d" (UID: "29c1c83f-d3d6-47fc-a81b-3fd664ab326d"). InnerVolumeSpecName "kube-api-access-6ws6j". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.711847 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/646dcf2a-3e0a-4470-8950-40543c320e6f-kube-api-access-d7d8q" (OuterVolumeSpecName: "kube-api-access-d7d8q") pod "646dcf2a-3e0a-4470-8950-40543c320e6f" (UID: "646dcf2a-3e0a-4470-8950-40543c320e6f"). InnerVolumeSpecName "kube-api-access-d7d8q". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.712920 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "29c1c83f-d3d6-47fc-a81b-3fd664ab326d" (UID: "29c1c83f-d3d6-47fc-a81b-3fd664ab326d"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.713898 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "29c1c83f-d3d6-47fc-a81b-3fd664ab326d" (UID: "29c1c83f-d3d6-47fc-a81b-3fd664ab326d"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.713962 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-scripts" (OuterVolumeSpecName: "scripts") pod "29c1c83f-d3d6-47fc-a81b-3fd664ab326d" (UID: "29c1c83f-d3d6-47fc-a81b-3fd664ab326d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.724857 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d92acdcc-36a4-41c7-bf03-f60966090662-kube-api-access-4vf5l" (OuterVolumeSpecName: "kube-api-access-4vf5l") pod "d92acdcc-36a4-41c7-bf03-f60966090662" (UID: "d92acdcc-36a4-41c7-bf03-f60966090662"). InnerVolumeSpecName "kube-api-access-4vf5l". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.726837 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-v77l6" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.726965 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-v77l6" event={"ID":"646dcf2a-3e0a-4470-8950-40543c320e6f","Type":"ContainerDied","Data":"6b762e13fcb2e58c16d7890afc3afd96be03766f99f4d33c1717e49b55b61616"} Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.727721 4838 scope.go:117] "RemoveContainer" containerID="9213026ee7acb986896eb0310d7f2e3cc1dd16e44607d8b4bbfe030fc2679c90" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.731750 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-b8d7k" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.731759 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-b8d7k" event={"ID":"29c1c83f-d3d6-47fc-a81b-3fd664ab326d","Type":"ContainerDied","Data":"f91b17c684b4d6b40ea3b722d0347894cf17236a8a8e438b6bad184eea6e2c16"} Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.731949 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f91b17c684b4d6b40ea3b722d0347894cf17236a8a8e438b6bad184eea6e2c16" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.733507 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-8dc3-account-create-update-9m9lm" event={"ID":"d92acdcc-36a4-41c7-bf03-f60966090662","Type":"ContainerDied","Data":"0a41fdc7ec7ee8815bede0de87adfe7270ad9c2da1d20abe50a6d1ee3d240d24"} Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.733544 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a41fdc7ec7ee8815bede0de87adfe7270ad9c2da1d20abe50a6d1ee3d240d24" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.733595 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-8dc3-account-create-update-9m9lm" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.741915 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-config-data" (OuterVolumeSpecName: "config-data") pod "29c1c83f-d3d6-47fc-a81b-3fd664ab326d" (UID: "29c1c83f-d3d6-47fc-a81b-3fd664ab326d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.760519 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/646dcf2a-3e0a-4470-8950-40543c320e6f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "646dcf2a-3e0a-4470-8950-40543c320e6f" (UID: "646dcf2a-3e0a-4470-8950-40543c320e6f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.761162 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "29c1c83f-d3d6-47fc-a81b-3fd664ab326d" (UID: "29c1c83f-d3d6-47fc-a81b-3fd664ab326d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.763102 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/646dcf2a-3e0a-4470-8950-40543c320e6f-config" (OuterVolumeSpecName: "config") pod "646dcf2a-3e0a-4470-8950-40543c320e6f" (UID: "646dcf2a-3e0a-4470-8950-40543c320e6f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.775224 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/646dcf2a-3e0a-4470-8950-40543c320e6f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "646dcf2a-3e0a-4470-8950-40543c320e6f" (UID: "646dcf2a-3e0a-4470-8950-40543c320e6f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.778422 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/646dcf2a-3e0a-4470-8950-40543c320e6f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "646dcf2a-3e0a-4470-8950-40543c320e6f" (UID: "646dcf2a-3e0a-4470-8950-40543c320e6f"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.808324 4838 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d92acdcc-36a4-41c7-bf03-f60966090662-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.808367 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.808378 4838 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-credential-keys\") on node \"crc\" DevicePath \"\"" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.808387 4838 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-fernet-keys\") on node \"crc\" DevicePath \"\"" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.808397 4838 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/646dcf2a-3e0a-4470-8950-40543c320e6f-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.808408 4838 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/646dcf2a-3e0a-4470-8950-40543c320e6f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.808417 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/646dcf2a-3e0a-4470-8950-40543c320e6f-config\") on node \"crc\" DevicePath \"\"" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.808425 4838 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/646dcf2a-3e0a-4470-8950-40543c320e6f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.808433 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vf5l\" (UniqueName: \"kubernetes.io/projected/d92acdcc-36a4-41c7-bf03-f60966090662-kube-api-access-4vf5l\") on node \"crc\" DevicePath \"\"" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.808443 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.808451 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d7d8q\" (UniqueName: \"kubernetes.io/projected/646dcf2a-3e0a-4470-8950-40543c320e6f-kube-api-access-d7d8q\") on node \"crc\" DevicePath \"\"" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.808459 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:13:19 crc kubenswrapper[4838]: I0202 11:13:19.808467 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ws6j\" (UniqueName: \"kubernetes.io/projected/29c1c83f-d3d6-47fc-a81b-3fd664ab326d-kube-api-access-6ws6j\") on node \"crc\" DevicePath \"\"" Feb 02 11:13:20 crc 
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.077149 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-v77l6"]
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.516029 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="646dcf2a-3e0a-4470-8950-40543c320e6f" path="/var/lib/kubelet/pods/646dcf2a-3e0a-4470-8950-40543c320e6f/volumes"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.673686 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-b8d7k"]
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.680080 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-b8d7k"]
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.777005 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-cvhqc"]
Feb 02 11:13:20 crc kubenswrapper[4838]: E0202 11:13:20.777364 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d92acdcc-36a4-41c7-bf03-f60966090662" containerName="mariadb-account-create-update"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.777379 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="d92acdcc-36a4-41c7-bf03-f60966090662" containerName="mariadb-account-create-update"
Feb 02 11:13:20 crc kubenswrapper[4838]: E0202 11:13:20.777391 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="646dcf2a-3e0a-4470-8950-40543c320e6f" containerName="dnsmasq-dns"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.777400 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="646dcf2a-3e0a-4470-8950-40543c320e6f" containerName="dnsmasq-dns"
Feb 02 11:13:20 crc kubenswrapper[4838]: E0202 11:13:20.777413 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04c9c0f3-24b8-4590-902e-bdd94bb01c25" containerName="init"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.777419 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="04c9c0f3-24b8-4590-902e-bdd94bb01c25" containerName="init"
Feb 02 11:13:20 crc kubenswrapper[4838]: E0202 11:13:20.777443 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e1f1c2e-fb1a-41bc-9815-5e2546c8640d" containerName="init"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.777449 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e1f1c2e-fb1a-41bc-9815-5e2546c8640d" containerName="init"
Feb 02 11:13:20 crc kubenswrapper[4838]: E0202 11:13:20.777456 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04c9c0f3-24b8-4590-902e-bdd94bb01c25" containerName="dnsmasq-dns"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.777462 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="04c9c0f3-24b8-4590-902e-bdd94bb01c25" containerName="dnsmasq-dns"
Feb 02 11:13:20 crc kubenswrapper[4838]: E0202 11:13:20.777474 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29c1c83f-d3d6-47fc-a81b-3fd664ab326d" containerName="keystone-bootstrap"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.777479 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="29c1c83f-d3d6-47fc-a81b-3fd664ab326d" containerName="keystone-bootstrap"
Feb 02 11:13:20 crc kubenswrapper[4838]: E0202 11:13:20.777489 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="989df2c8-4c47-4998-a79d-967c08df69fb" containerName="mariadb-database-create"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.777494 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="989df2c8-4c47-4998-a79d-967c08df69fb" containerName="mariadb-database-create"
Feb 02 11:13:20 crc kubenswrapper[4838]: E0202 11:13:20.777510 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="646dcf2a-3e0a-4470-8950-40543c320e6f" containerName="init"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.777516 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="646dcf2a-3e0a-4470-8950-40543c320e6f" containerName="init"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.777678 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e1f1c2e-fb1a-41bc-9815-5e2546c8640d" containerName="init"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.777690 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="04c9c0f3-24b8-4590-902e-bdd94bb01c25" containerName="dnsmasq-dns"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.777700 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="29c1c83f-d3d6-47fc-a81b-3fd664ab326d" containerName="keystone-bootstrap"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.777709 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="646dcf2a-3e0a-4470-8950-40543c320e6f" containerName="dnsmasq-dns"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.777715 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="d92acdcc-36a4-41c7-bf03-f60966090662" containerName="mariadb-account-create-update"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.777728 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="989df2c8-4c47-4998-a79d-967c08df69fb" containerName="mariadb-database-create"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.778299 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-cvhqc"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.780173 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-9mbr8"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.781039 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.781251 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.782154 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.783703 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.790147 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-cvhqc"]
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.828019 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-scripts\") pod \"keystone-bootstrap-cvhqc\" (UID: \"1c087507-7f44-4ead-b6e0-622152cb2eaf\") " pod="openstack/keystone-bootstrap-cvhqc"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.828105 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-fernet-keys\") pod \"keystone-bootstrap-cvhqc\" (UID: \"1c087507-7f44-4ead-b6e0-622152cb2eaf\") " pod="openstack/keystone-bootstrap-cvhqc"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.828159 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-combined-ca-bundle\") pod \"keystone-bootstrap-cvhqc\" (UID: \"1c087507-7f44-4ead-b6e0-622152cb2eaf\") " pod="openstack/keystone-bootstrap-cvhqc"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.828197 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-config-data\") pod \"keystone-bootstrap-cvhqc\" (UID: \"1c087507-7f44-4ead-b6e0-622152cb2eaf\") " pod="openstack/keystone-bootstrap-cvhqc"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.828220 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4r8r\" (UniqueName: \"kubernetes.io/projected/1c087507-7f44-4ead-b6e0-622152cb2eaf-kube-api-access-m4r8r\") pod \"keystone-bootstrap-cvhqc\" (UID: \"1c087507-7f44-4ead-b6e0-622152cb2eaf\") " pod="openstack/keystone-bootstrap-cvhqc"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.828286 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-credential-keys\") pod \"keystone-bootstrap-cvhqc\" (UID: \"1c087507-7f44-4ead-b6e0-622152cb2eaf\") " pod="openstack/keystone-bootstrap-cvhqc"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.929406 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-combined-ca-bundle\") pod \"keystone-bootstrap-cvhqc\" (UID: \"1c087507-7f44-4ead-b6e0-622152cb2eaf\") " pod="openstack/keystone-bootstrap-cvhqc"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.929486 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-config-data\") pod \"keystone-bootstrap-cvhqc\" (UID: \"1c087507-7f44-4ead-b6e0-622152cb2eaf\") " pod="openstack/keystone-bootstrap-cvhqc"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.929515 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4r8r\" (UniqueName: \"kubernetes.io/projected/1c087507-7f44-4ead-b6e0-622152cb2eaf-kube-api-access-m4r8r\") pod \"keystone-bootstrap-cvhqc\" (UID: \"1c087507-7f44-4ead-b6e0-622152cb2eaf\") " pod="openstack/keystone-bootstrap-cvhqc"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.929582 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-credential-keys\") pod \"keystone-bootstrap-cvhqc\" (UID: \"1c087507-7f44-4ead-b6e0-622152cb2eaf\") " pod="openstack/keystone-bootstrap-cvhqc"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.929647 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-scripts\") pod \"keystone-bootstrap-cvhqc\" (UID: \"1c087507-7f44-4ead-b6e0-622152cb2eaf\") " pod="openstack/keystone-bootstrap-cvhqc"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.929693 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-fernet-keys\") pod \"keystone-bootstrap-cvhqc\" (UID: \"1c087507-7f44-4ead-b6e0-622152cb2eaf\") " pod="openstack/keystone-bootstrap-cvhqc"
Feb 02 11:13:20 crc kubenswrapper[4838]: E0202 11:13:20.929944 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified"
Feb 02 11:13:20 crc kubenswrapper[4838]: E0202 11:13:20.930092 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-29dl6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-5jll9_openstack(12da676a-3c0b-4e05-996b-6f0b859d99e3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Feb 02 11:13:20 crc kubenswrapper[4838]: E0202 11:13:20.933475 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-5jll9" podUID="12da676a-3c0b-4e05-996b-6f0b859d99e3"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.935139 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-config-data\") pod \"keystone-bootstrap-cvhqc\" (UID: \"1c087507-7f44-4ead-b6e0-622152cb2eaf\") " pod="openstack/keystone-bootstrap-cvhqc"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.936045 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-scripts\") pod \"keystone-bootstrap-cvhqc\" (UID: \"1c087507-7f44-4ead-b6e0-622152cb2eaf\") " pod="openstack/keystone-bootstrap-cvhqc"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.940105 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-fernet-keys\") pod \"keystone-bootstrap-cvhqc\" (UID: \"1c087507-7f44-4ead-b6e0-622152cb2eaf\") " pod="openstack/keystone-bootstrap-cvhqc"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.940388 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-credential-keys\") pod \"keystone-bootstrap-cvhqc\" (UID: \"1c087507-7f44-4ead-b6e0-622152cb2eaf\") " pod="openstack/keystone-bootstrap-cvhqc"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.942106 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-combined-ca-bundle\") pod \"keystone-bootstrap-cvhqc\" (UID: \"1c087507-7f44-4ead-b6e0-622152cb2eaf\") " pod="openstack/keystone-bootstrap-cvhqc"
Feb 02 11:13:20 crc kubenswrapper[4838]: I0202 11:13:20.947487 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4r8r\" (UniqueName: \"kubernetes.io/projected/1c087507-7f44-4ead-b6e0-622152cb2eaf-kube-api-access-m4r8r\") pod \"keystone-bootstrap-cvhqc\" (UID: \"1c087507-7f44-4ead-b6e0-622152cb2eaf\") " pod="openstack/keystone-bootstrap-cvhqc"
Feb 02 11:13:21 crc kubenswrapper[4838]: I0202 11:13:21.099187 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-cvhqc"
Feb 02 11:13:21 crc kubenswrapper[4838]: E0202 11:13:21.751020 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-5jll9" podUID="12da676a-3c0b-4e05-996b-6f0b859d99e3"
Feb 02 11:13:22 crc kubenswrapper[4838]: I0202 11:13:22.516411 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29c1c83f-d3d6-47fc-a81b-3fd664ab326d" path="/var/lib/kubelet/pods/29c1c83f-d3d6-47fc-a81b-3fd664ab326d/volumes"
Feb 02 11:13:22 crc kubenswrapper[4838]: E0202 11:13:22.940913 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified"
Feb 02 11:13:22 crc kubenswrapper[4838]: E0202 11:13:22.941096 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n76h5bch554h65bh678h9fh55hcdh7fh7h56bh649h59fh8bh6bh685h5fh5c9h647h59dh578h698hf4h65dh54fh7ch559hcbh58fhcdhbfh548q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pxmgv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(69335c29-7c9c-438d-ac8d-85141a4f9bb5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Feb 02 11:13:22 crc kubenswrapper[4838]: I0202 11:13:22.942791 4838 scope.go:117] "RemoveContainer" containerID="38aedb83a626d18618986a5afeb292a4349bd1b05d30b3255777b1ecb084f65d"
Feb 02 11:13:23 crc kubenswrapper[4838]: I0202 11:13:23.386688 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-cvhqc"]
Feb 02 11:13:23 crc kubenswrapper[4838]: W0202 11:13:23.389973 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1c087507_7f44_4ead_b6e0_622152cb2eaf.slice/crio-3b34d5bc699e3058fc82935a48bdc0ebd77b7994bfd5a0b646bd9ca4fdea754a WatchSource:0}: Error finding container 3b34d5bc699e3058fc82935a48bdc0ebd77b7994bfd5a0b646bd9ca4fdea754a: Status 404 returned error can't find the container with id 3b34d5bc699e3058fc82935a48bdc0ebd77b7994bfd5a0b646bd9ca4fdea754a
Feb 02 11:13:23 crc kubenswrapper[4838]: I0202 11:13:23.565763 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-v77l6" podUID="646dcf2a-3e0a-4470-8950-40543c320e6f" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.112:5353: i/o timeout"
timeout" Feb 02 11:13:23 crc kubenswrapper[4838]: I0202 11:13:23.768682 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cvhqc" event={"ID":"1c087507-7f44-4ead-b6e0-622152cb2eaf","Type":"ContainerStarted","Data":"c51949688f02a5a86158690a24397981872e3a43b4c2e7c7456e37c01858a43a"} Feb 02 11:13:23 crc kubenswrapper[4838]: I0202 11:13:23.768731 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cvhqc" event={"ID":"1c087507-7f44-4ead-b6e0-622152cb2eaf","Type":"ContainerStarted","Data":"3b34d5bc699e3058fc82935a48bdc0ebd77b7994bfd5a0b646bd9ca4fdea754a"} Feb 02 11:13:23 crc kubenswrapper[4838]: I0202 11:13:23.773039 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-swztr" event={"ID":"3c1f48f2-93aa-4b92-a289-7869c1993629","Type":"ContainerStarted","Data":"c855af0fa2c08c2144c31666a641e6cbb7ea66182beb1e800da4f1f4d8f51032"} Feb 02 11:13:23 crc kubenswrapper[4838]: I0202 11:13:23.794587 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-cvhqc" podStartSLOduration=3.7945614450000003 podStartE2EDuration="3.794561445s" podCreationTimestamp="2026-02-02 11:13:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:13:23.790027375 +0000 UTC m=+1198.127128423" watchObservedRunningTime="2026-02-02 11:13:23.794561445 +0000 UTC m=+1198.131662473" Feb 02 11:13:23 crc kubenswrapper[4838]: I0202 11:13:23.810785 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-swztr" podStartSLOduration=6.864142342 podStartE2EDuration="39.810767583s" podCreationTimestamp="2026-02-02 11:12:44 +0000 UTC" firstStartedPulling="2026-02-02 11:12:46.501723117 +0000 UTC m=+1160.838824145" lastFinishedPulling="2026-02-02 11:13:19.448348358 +0000 UTC m=+1193.785449386" observedRunningTime="2026-02-02 11:13:23.809835948 +0000 UTC m=+1198.146936996" watchObservedRunningTime="2026-02-02 11:13:23.810767583 +0000 UTC m=+1198.147868621" Feb 02 11:13:24 crc kubenswrapper[4838]: I0202 11:13:24.786948 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-4dkm9" event={"ID":"50879d46-58dc-4716-89fd-bc68eea3bd2e","Type":"ContainerStarted","Data":"ac1a6fcf0eca00396a708ee597872b3453fc12310c0a4d0f7e014f7839628f48"} Feb 02 11:13:24 crc kubenswrapper[4838]: I0202 11:13:24.814590 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-4dkm9" podStartSLOduration=3.143078698 podStartE2EDuration="40.814567809s" podCreationTimestamp="2026-02-02 11:12:44 +0000 UTC" firstStartedPulling="2026-02-02 11:12:46.531363799 +0000 UTC m=+1160.868464837" lastFinishedPulling="2026-02-02 11:13:24.20285291 +0000 UTC m=+1198.539953948" observedRunningTime="2026-02-02 11:13:24.806949108 +0000 UTC m=+1199.144050146" watchObservedRunningTime="2026-02-02 11:13:24.814567809 +0000 UTC m=+1199.151668857" Feb 02 11:13:25 crc kubenswrapper[4838]: I0202 11:13:25.168438 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ironic-db-sync-z8sb2"] Feb 02 11:13:25 crc kubenswrapper[4838]: I0202 11:13:25.170816 4838 util.go:30] "No sandbox for pod can be found. 
Feb 02 11:13:25 crc kubenswrapper[4838]: I0202 11:13:25.174833 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-config-data"
Feb 02 11:13:25 crc kubenswrapper[4838]: I0202 11:13:25.175150 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-scripts"
Feb 02 11:13:25 crc kubenswrapper[4838]: I0202 11:13:25.175272 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-ironic-dockercfg-smhrz"
Feb 02 11:13:25 crc kubenswrapper[4838]: I0202 11:13:25.183936 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-db-sync-z8sb2"]
Feb 02 11:13:25 crc kubenswrapper[4838]: I0202 11:13:25.224628 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04bf896a-e964-48a2-900e-44362394a6ac-scripts\") pod \"ironic-db-sync-z8sb2\" (UID: \"04bf896a-e964-48a2-900e-44362394a6ac\") " pod="openstack/ironic-db-sync-z8sb2"
Feb 02 11:13:25 crc kubenswrapper[4838]: I0202 11:13:25.225161 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04bf896a-e964-48a2-900e-44362394a6ac-combined-ca-bundle\") pod \"ironic-db-sync-z8sb2\" (UID: \"04bf896a-e964-48a2-900e-44362394a6ac\") " pod="openstack/ironic-db-sync-z8sb2"
Feb 02 11:13:25 crc kubenswrapper[4838]: I0202 11:13:25.225304 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04bf896a-e964-48a2-900e-44362394a6ac-config-data\") pod \"ironic-db-sync-z8sb2\" (UID: \"04bf896a-e964-48a2-900e-44362394a6ac\") " pod="openstack/ironic-db-sync-z8sb2"
Feb 02 11:13:25 crc kubenswrapper[4838]: I0202 11:13:25.225446 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlw4f\" (UniqueName: \"kubernetes.io/projected/04bf896a-e964-48a2-900e-44362394a6ac-kube-api-access-rlw4f\") pod \"ironic-db-sync-z8sb2\" (UID: \"04bf896a-e964-48a2-900e-44362394a6ac\") " pod="openstack/ironic-db-sync-z8sb2"
Feb 02 11:13:25 crc kubenswrapper[4838]: I0202 11:13:25.225562 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/04bf896a-e964-48a2-900e-44362394a6ac-config-data-merged\") pod \"ironic-db-sync-z8sb2\" (UID: \"04bf896a-e964-48a2-900e-44362394a6ac\") " pod="openstack/ironic-db-sync-z8sb2"
Feb 02 11:13:25 crc kubenswrapper[4838]: I0202 11:13:25.225717 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/04bf896a-e964-48a2-900e-44362394a6ac-etc-podinfo\") pod \"ironic-db-sync-z8sb2\" (UID: \"04bf896a-e964-48a2-900e-44362394a6ac\") " pod="openstack/ironic-db-sync-z8sb2"
Feb 02 11:13:25 crc kubenswrapper[4838]: I0202 11:13:25.327510 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/04bf896a-e964-48a2-900e-44362394a6ac-etc-podinfo\") pod \"ironic-db-sync-z8sb2\" (UID: \"04bf896a-e964-48a2-900e-44362394a6ac\") " pod="openstack/ironic-db-sync-z8sb2"
Feb 02 11:13:25 crc kubenswrapper[4838]: I0202 11:13:25.327556 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04bf896a-e964-48a2-900e-44362394a6ac-scripts\") pod \"ironic-db-sync-z8sb2\" (UID: \"04bf896a-e964-48a2-900e-44362394a6ac\") " pod="openstack/ironic-db-sync-z8sb2"
Feb 02 11:13:25 crc kubenswrapper[4838]: I0202 11:13:25.327591 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04bf896a-e964-48a2-900e-44362394a6ac-combined-ca-bundle\") pod \"ironic-db-sync-z8sb2\" (UID: \"04bf896a-e964-48a2-900e-44362394a6ac\") " pod="openstack/ironic-db-sync-z8sb2"
Feb 02 11:13:25 crc kubenswrapper[4838]: I0202 11:13:25.327665 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04bf896a-e964-48a2-900e-44362394a6ac-config-data\") pod \"ironic-db-sync-z8sb2\" (UID: \"04bf896a-e964-48a2-900e-44362394a6ac\") " pod="openstack/ironic-db-sync-z8sb2"
Feb 02 11:13:25 crc kubenswrapper[4838]: I0202 11:13:25.327717 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlw4f\" (UniqueName: \"kubernetes.io/projected/04bf896a-e964-48a2-900e-44362394a6ac-kube-api-access-rlw4f\") pod \"ironic-db-sync-z8sb2\" (UID: \"04bf896a-e964-48a2-900e-44362394a6ac\") " pod="openstack/ironic-db-sync-z8sb2"
Feb 02 11:13:25 crc kubenswrapper[4838]: I0202 11:13:25.327749 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/04bf896a-e964-48a2-900e-44362394a6ac-config-data-merged\") pod \"ironic-db-sync-z8sb2\" (UID: \"04bf896a-e964-48a2-900e-44362394a6ac\") " pod="openstack/ironic-db-sync-z8sb2"
Feb 02 11:13:25 crc kubenswrapper[4838]: I0202 11:13:25.331169 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/04bf896a-e964-48a2-900e-44362394a6ac-config-data-merged\") pod \"ironic-db-sync-z8sb2\" (UID: \"04bf896a-e964-48a2-900e-44362394a6ac\") " pod="openstack/ironic-db-sync-z8sb2"
Feb 02 11:13:25 crc kubenswrapper[4838]: I0202 11:13:25.337256 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04bf896a-e964-48a2-900e-44362394a6ac-scripts\") pod \"ironic-db-sync-z8sb2\" (UID: \"04bf896a-e964-48a2-900e-44362394a6ac\") " pod="openstack/ironic-db-sync-z8sb2"
Feb 02 11:13:25 crc kubenswrapper[4838]: I0202 11:13:25.337283 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/04bf896a-e964-48a2-900e-44362394a6ac-etc-podinfo\") pod \"ironic-db-sync-z8sb2\" (UID: \"04bf896a-e964-48a2-900e-44362394a6ac\") " pod="openstack/ironic-db-sync-z8sb2"
Feb 02 11:13:25 crc kubenswrapper[4838]: I0202 11:13:25.337789 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04bf896a-e964-48a2-900e-44362394a6ac-combined-ca-bundle\") pod \"ironic-db-sync-z8sb2\" (UID: \"04bf896a-e964-48a2-900e-44362394a6ac\") " pod="openstack/ironic-db-sync-z8sb2"
Feb 02 11:13:25 crc kubenswrapper[4838]: I0202 11:13:25.346819 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04bf896a-e964-48a2-900e-44362394a6ac-config-data\") pod \"ironic-db-sync-z8sb2\" (UID: \"04bf896a-e964-48a2-900e-44362394a6ac\") " pod="openstack/ironic-db-sync-z8sb2"
pod="openstack/ironic-db-sync-z8sb2" Feb 02 11:13:25 crc kubenswrapper[4838]: I0202 11:13:25.348465 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlw4f\" (UniqueName: \"kubernetes.io/projected/04bf896a-e964-48a2-900e-44362394a6ac-kube-api-access-rlw4f\") pod \"ironic-db-sync-z8sb2\" (UID: \"04bf896a-e964-48a2-900e-44362394a6ac\") " pod="openstack/ironic-db-sync-z8sb2" Feb 02 11:13:25 crc kubenswrapper[4838]: I0202 11:13:25.539707 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-db-sync-z8sb2" Feb 02 11:13:41 crc kubenswrapper[4838]: I0202 11:13:40.929671 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"69335c29-7c9c-438d-ac8d-85141a4f9bb5","Type":"ContainerStarted","Data":"a84e0ce2d735e4167f4606dd17d211b1dbb2be9fa7f998fc51fe43fc67059f1a"} Feb 02 11:13:41 crc kubenswrapper[4838]: I0202 11:13:41.536644 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-db-sync-z8sb2"] Feb 02 11:13:41 crc kubenswrapper[4838]: W0202 11:13:41.542606 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04bf896a_e964_48a2_900e_44362394a6ac.slice/crio-07ea8aa7a2ff3809bd2b770f4f63381ae8e62d662c05b272eb91ea44daf9507e WatchSource:0}: Error finding container 07ea8aa7a2ff3809bd2b770f4f63381ae8e62d662c05b272eb91ea44daf9507e: Status 404 returned error can't find the container with id 07ea8aa7a2ff3809bd2b770f4f63381ae8e62d662c05b272eb91ea44daf9507e Feb 02 11:13:41 crc kubenswrapper[4838]: I0202 11:13:41.939528 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-db-sync-z8sb2" event={"ID":"04bf896a-e964-48a2-900e-44362394a6ac","Type":"ContainerStarted","Data":"07ea8aa7a2ff3809bd2b770f4f63381ae8e62d662c05b272eb91ea44daf9507e"} Feb 02 11:13:42 crc kubenswrapper[4838]: I0202 11:13:42.955934 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-5jll9" event={"ID":"12da676a-3c0b-4e05-996b-6f0b859d99e3","Type":"ContainerStarted","Data":"05667767dc9c615f688152f9f30c5e1ea9de23e5a3834c9f6d404517e4864ba3"} Feb 02 11:13:42 crc kubenswrapper[4838]: I0202 11:13:42.988422 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-5jll9" podStartSLOduration=3.646599883 podStartE2EDuration="58.988397621s" podCreationTimestamp="2026-02-02 11:12:44 +0000 UTC" firstStartedPulling="2026-02-02 11:12:46.234987464 +0000 UTC m=+1160.572088492" lastFinishedPulling="2026-02-02 11:13:41.576785192 +0000 UTC m=+1215.913886230" observedRunningTime="2026-02-02 11:13:42.982505236 +0000 UTC m=+1217.319606284" watchObservedRunningTime="2026-02-02 11:13:42.988397621 +0000 UTC m=+1217.325498659" Feb 02 11:13:43 crc kubenswrapper[4838]: I0202 11:13:43.981498 4838 generic.go:334] "Generic (PLEG): container finished" podID="1c087507-7f44-4ead-b6e0-622152cb2eaf" containerID="c51949688f02a5a86158690a24397981872e3a43b4c2e7c7456e37c01858a43a" exitCode=0 Feb 02 11:13:43 crc kubenswrapper[4838]: I0202 11:13:43.982298 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cvhqc" event={"ID":"1c087507-7f44-4ead-b6e0-622152cb2eaf","Type":"ContainerDied","Data":"c51949688f02a5a86158690a24397981872e3a43b4c2e7c7456e37c01858a43a"} Feb 02 11:13:45 crc kubenswrapper[4838]: I0202 11:13:45.429927 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 11:13:45 crc kubenswrapper[4838]: I0202 11:13:45.430284 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 11:13:45 crc kubenswrapper[4838]: I0202 11:13:45.430337 4838 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" Feb 02 11:13:45 crc kubenswrapper[4838]: I0202 11:13:45.431321 4838 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"34c7a9cc4d8fb6168afba32d2440c7d9ab6f69f8c80d4ae7f515c16fdb162626"} pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 11:13:45 crc kubenswrapper[4838]: I0202 11:13:45.431380 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" containerID="cri-o://34c7a9cc4d8fb6168afba32d2440c7d9ab6f69f8c80d4ae7f515c16fdb162626" gracePeriod=600 Feb 02 11:13:45 crc kubenswrapper[4838]: E0202 11:13:45.487914 4838 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc1e0f2bd_7afa_44f4_a3cb_cad88c063dce.slice/crio-34c7a9cc4d8fb6168afba32d2440c7d9ab6f69f8c80d4ae7f515c16fdb162626.scope\": RecentStats: unable to find data in memory cache]" Feb 02 11:13:46 crc kubenswrapper[4838]: I0202 11:13:46.002376 4838 generic.go:334] "Generic (PLEG): container finished" podID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerID="34c7a9cc4d8fb6168afba32d2440c7d9ab6f69f8c80d4ae7f515c16fdb162626" exitCode=0 Feb 02 11:13:46 crc kubenswrapper[4838]: I0202 11:13:46.002456 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" event={"ID":"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce","Type":"ContainerDied","Data":"34c7a9cc4d8fb6168afba32d2440c7d9ab6f69f8c80d4ae7f515c16fdb162626"} Feb 02 11:13:46 crc kubenswrapper[4838]: I0202 11:13:46.002730 4838 scope.go:117] "RemoveContainer" containerID="411e6aa6542cc291703765b915acdf4b4838b2ed95b8455f8ee0b804a9cfdae7" Feb 02 11:13:47 crc kubenswrapper[4838]: I0202 11:13:47.013831 4838 generic.go:334] "Generic (PLEG): container finished" podID="54de1caa-888f-433a-be5e-87b93932abc2" containerID="c467002944aa28ab1606b991f49e609351c7d6cd2c5e92143ff8d7880235d978" exitCode=0 Feb 02 11:13:47 crc kubenswrapper[4838]: I0202 11:13:47.013890 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-snvfg" event={"ID":"54de1caa-888f-433a-be5e-87b93932abc2","Type":"ContainerDied","Data":"c467002944aa28ab1606b991f49e609351c7d6cd2c5e92143ff8d7880235d978"} Feb 02 11:13:51 crc kubenswrapper[4838]: I0202 11:13:51.052284 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cvhqc" 
event={"ID":"1c087507-7f44-4ead-b6e0-622152cb2eaf","Type":"ContainerDied","Data":"3b34d5bc699e3058fc82935a48bdc0ebd77b7994bfd5a0b646bd9ca4fdea754a"} Feb 02 11:13:51 crc kubenswrapper[4838]: I0202 11:13:51.052783 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3b34d5bc699e3058fc82935a48bdc0ebd77b7994bfd5a0b646bd9ca4fdea754a" Feb 02 11:13:51 crc kubenswrapper[4838]: I0202 11:13:51.062418 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-cvhqc" Feb 02 11:13:51 crc kubenswrapper[4838]: I0202 11:13:51.218427 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-scripts\") pod \"1c087507-7f44-4ead-b6e0-622152cb2eaf\" (UID: \"1c087507-7f44-4ead-b6e0-622152cb2eaf\") " Feb 02 11:13:51 crc kubenswrapper[4838]: I0202 11:13:51.218504 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-fernet-keys\") pod \"1c087507-7f44-4ead-b6e0-622152cb2eaf\" (UID: \"1c087507-7f44-4ead-b6e0-622152cb2eaf\") " Feb 02 11:13:51 crc kubenswrapper[4838]: I0202 11:13:51.218568 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m4r8r\" (UniqueName: \"kubernetes.io/projected/1c087507-7f44-4ead-b6e0-622152cb2eaf-kube-api-access-m4r8r\") pod \"1c087507-7f44-4ead-b6e0-622152cb2eaf\" (UID: \"1c087507-7f44-4ead-b6e0-622152cb2eaf\") " Feb 02 11:13:51 crc kubenswrapper[4838]: I0202 11:13:51.218611 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-combined-ca-bundle\") pod \"1c087507-7f44-4ead-b6e0-622152cb2eaf\" (UID: \"1c087507-7f44-4ead-b6e0-622152cb2eaf\") " Feb 02 11:13:51 crc kubenswrapper[4838]: I0202 11:13:51.218764 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-config-data\") pod \"1c087507-7f44-4ead-b6e0-622152cb2eaf\" (UID: \"1c087507-7f44-4ead-b6e0-622152cb2eaf\") " Feb 02 11:13:51 crc kubenswrapper[4838]: I0202 11:13:51.218824 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-credential-keys\") pod \"1c087507-7f44-4ead-b6e0-622152cb2eaf\" (UID: \"1c087507-7f44-4ead-b6e0-622152cb2eaf\") " Feb 02 11:13:51 crc kubenswrapper[4838]: I0202 11:13:51.227144 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "1c087507-7f44-4ead-b6e0-622152cb2eaf" (UID: "1c087507-7f44-4ead-b6e0-622152cb2eaf"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:13:51 crc kubenswrapper[4838]: I0202 11:13:51.227162 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c087507-7f44-4ead-b6e0-622152cb2eaf-kube-api-access-m4r8r" (OuterVolumeSpecName: "kube-api-access-m4r8r") pod "1c087507-7f44-4ead-b6e0-622152cb2eaf" (UID: "1c087507-7f44-4ead-b6e0-622152cb2eaf"). InnerVolumeSpecName "kube-api-access-m4r8r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:13:51 crc kubenswrapper[4838]: I0202 11:13:51.227177 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-scripts" (OuterVolumeSpecName: "scripts") pod "1c087507-7f44-4ead-b6e0-622152cb2eaf" (UID: "1c087507-7f44-4ead-b6e0-622152cb2eaf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:13:51 crc kubenswrapper[4838]: I0202 11:13:51.227202 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "1c087507-7f44-4ead-b6e0-622152cb2eaf" (UID: "1c087507-7f44-4ead-b6e0-622152cb2eaf"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:13:51 crc kubenswrapper[4838]: I0202 11:13:51.246184 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1c087507-7f44-4ead-b6e0-622152cb2eaf" (UID: "1c087507-7f44-4ead-b6e0-622152cb2eaf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:13:51 crc kubenswrapper[4838]: I0202 11:13:51.290390 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-config-data" (OuterVolumeSpecName: "config-data") pod "1c087507-7f44-4ead-b6e0-622152cb2eaf" (UID: "1c087507-7f44-4ead-b6e0-622152cb2eaf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:13:51 crc kubenswrapper[4838]: I0202 11:13:51.339240 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:13:51 crc kubenswrapper[4838]: I0202 11:13:51.339268 4838 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-fernet-keys\") on node \"crc\" DevicePath \"\"" Feb 02 11:13:51 crc kubenswrapper[4838]: I0202 11:13:51.339277 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m4r8r\" (UniqueName: \"kubernetes.io/projected/1c087507-7f44-4ead-b6e0-622152cb2eaf-kube-api-access-m4r8r\") on node \"crc\" DevicePath \"\"" Feb 02 11:13:51 crc kubenswrapper[4838]: I0202 11:13:51.339286 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:13:51 crc kubenswrapper[4838]: I0202 11:13:51.339294 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:13:51 crc kubenswrapper[4838]: I0202 11:13:51.339302 4838 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1c087507-7f44-4ead-b6e0-622152cb2eaf-credential-keys\") on node \"crc\" DevicePath \"\"" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.059844 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-cvhqc" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.189832 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-554c88cfc6-svbvn"] Feb 02 11:13:52 crc kubenswrapper[4838]: E0202 11:13:52.190275 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c087507-7f44-4ead-b6e0-622152cb2eaf" containerName="keystone-bootstrap" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.190301 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c087507-7f44-4ead-b6e0-622152cb2eaf" containerName="keystone-bootstrap" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.190527 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c087507-7f44-4ead-b6e0-622152cb2eaf" containerName="keystone-bootstrap" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.191270 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.194347 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-9mbr8" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.194466 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.194674 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.194883 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.194891 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.194958 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.198194 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-554c88cfc6-svbvn"] Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.263416 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04f65e34-3c92-4288-86f4-cfc67c46de23-scripts\") pod \"keystone-554c88cfc6-svbvn\" (UID: \"04f65e34-3c92-4288-86f4-cfc67c46de23\") " pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.263515 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tg7n5\" (UniqueName: \"kubernetes.io/projected/04f65e34-3c92-4288-86f4-cfc67c46de23-kube-api-access-tg7n5\") pod \"keystone-554c88cfc6-svbvn\" (UID: \"04f65e34-3c92-4288-86f4-cfc67c46de23\") " pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.263551 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/04f65e34-3c92-4288-86f4-cfc67c46de23-public-tls-certs\") pod \"keystone-554c88cfc6-svbvn\" (UID: \"04f65e34-3c92-4288-86f4-cfc67c46de23\") " pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.263580 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"credential-keys\" (UniqueName: \"kubernetes.io/secret/04f65e34-3c92-4288-86f4-cfc67c46de23-credential-keys\") pod \"keystone-554c88cfc6-svbvn\" (UID: \"04f65e34-3c92-4288-86f4-cfc67c46de23\") " pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.263598 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/04f65e34-3c92-4288-86f4-cfc67c46de23-internal-tls-certs\") pod \"keystone-554c88cfc6-svbvn\" (UID: \"04f65e34-3c92-4288-86f4-cfc67c46de23\") " pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.263630 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04f65e34-3c92-4288-86f4-cfc67c46de23-config-data\") pod \"keystone-554c88cfc6-svbvn\" (UID: \"04f65e34-3c92-4288-86f4-cfc67c46de23\") " pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.263685 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04f65e34-3c92-4288-86f4-cfc67c46de23-combined-ca-bundle\") pod \"keystone-554c88cfc6-svbvn\" (UID: \"04f65e34-3c92-4288-86f4-cfc67c46de23\") " pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.263716 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/04f65e34-3c92-4288-86f4-cfc67c46de23-fernet-keys\") pod \"keystone-554c88cfc6-svbvn\" (UID: \"04f65e34-3c92-4288-86f4-cfc67c46de23\") " pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.366087 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/04f65e34-3c92-4288-86f4-cfc67c46de23-credential-keys\") pod \"keystone-554c88cfc6-svbvn\" (UID: \"04f65e34-3c92-4288-86f4-cfc67c46de23\") " pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.366193 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/04f65e34-3c92-4288-86f4-cfc67c46de23-internal-tls-certs\") pod \"keystone-554c88cfc6-svbvn\" (UID: \"04f65e34-3c92-4288-86f4-cfc67c46de23\") " pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.366236 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04f65e34-3c92-4288-86f4-cfc67c46de23-config-data\") pod \"keystone-554c88cfc6-svbvn\" (UID: \"04f65e34-3c92-4288-86f4-cfc67c46de23\") " pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.366281 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04f65e34-3c92-4288-86f4-cfc67c46de23-combined-ca-bundle\") pod \"keystone-554c88cfc6-svbvn\" (UID: \"04f65e34-3c92-4288-86f4-cfc67c46de23\") " pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.366490 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: 
\"kubernetes.io/secret/04f65e34-3c92-4288-86f4-cfc67c46de23-fernet-keys\") pod \"keystone-554c88cfc6-svbvn\" (UID: \"04f65e34-3c92-4288-86f4-cfc67c46de23\") " pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.366569 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04f65e34-3c92-4288-86f4-cfc67c46de23-scripts\") pod \"keystone-554c88cfc6-svbvn\" (UID: \"04f65e34-3c92-4288-86f4-cfc67c46de23\") " pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.366683 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tg7n5\" (UniqueName: \"kubernetes.io/projected/04f65e34-3c92-4288-86f4-cfc67c46de23-kube-api-access-tg7n5\") pod \"keystone-554c88cfc6-svbvn\" (UID: \"04f65e34-3c92-4288-86f4-cfc67c46de23\") " pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.366731 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/04f65e34-3c92-4288-86f4-cfc67c46de23-public-tls-certs\") pod \"keystone-554c88cfc6-svbvn\" (UID: \"04f65e34-3c92-4288-86f4-cfc67c46de23\") " pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.371819 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/04f65e34-3c92-4288-86f4-cfc67c46de23-fernet-keys\") pod \"keystone-554c88cfc6-svbvn\" (UID: \"04f65e34-3c92-4288-86f4-cfc67c46de23\") " pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.372655 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/04f65e34-3c92-4288-86f4-cfc67c46de23-public-tls-certs\") pod \"keystone-554c88cfc6-svbvn\" (UID: \"04f65e34-3c92-4288-86f4-cfc67c46de23\") " pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.375958 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04f65e34-3c92-4288-86f4-cfc67c46de23-scripts\") pod \"keystone-554c88cfc6-svbvn\" (UID: \"04f65e34-3c92-4288-86f4-cfc67c46de23\") " pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.376194 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/04f65e34-3c92-4288-86f4-cfc67c46de23-internal-tls-certs\") pod \"keystone-554c88cfc6-svbvn\" (UID: \"04f65e34-3c92-4288-86f4-cfc67c46de23\") " pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.376566 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04f65e34-3c92-4288-86f4-cfc67c46de23-config-data\") pod \"keystone-554c88cfc6-svbvn\" (UID: \"04f65e34-3c92-4288-86f4-cfc67c46de23\") " pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.377510 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04f65e34-3c92-4288-86f4-cfc67c46de23-combined-ca-bundle\") pod \"keystone-554c88cfc6-svbvn\" (UID: \"04f65e34-3c92-4288-86f4-cfc67c46de23\") " 
pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.384595 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/04f65e34-3c92-4288-86f4-cfc67c46de23-credential-keys\") pod \"keystone-554c88cfc6-svbvn\" (UID: \"04f65e34-3c92-4288-86f4-cfc67c46de23\") " pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.385520 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tg7n5\" (UniqueName: \"kubernetes.io/projected/04f65e34-3c92-4288-86f4-cfc67c46de23-kube-api-access-tg7n5\") pod \"keystone-554c88cfc6-svbvn\" (UID: \"04f65e34-3c92-4288-86f4-cfc67c46de23\") " pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:13:52 crc kubenswrapper[4838]: I0202 11:13:52.646013 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:13:55 crc kubenswrapper[4838]: I0202 11:13:55.328541 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-snvfg" Feb 02 11:13:55 crc kubenswrapper[4838]: I0202 11:13:55.418571 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ztctw\" (UniqueName: \"kubernetes.io/projected/54de1caa-888f-433a-be5e-87b93932abc2-kube-api-access-ztctw\") pod \"54de1caa-888f-433a-be5e-87b93932abc2\" (UID: \"54de1caa-888f-433a-be5e-87b93932abc2\") " Feb 02 11:13:55 crc kubenswrapper[4838]: I0202 11:13:55.418667 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/54de1caa-888f-433a-be5e-87b93932abc2-db-sync-config-data\") pod \"54de1caa-888f-433a-be5e-87b93932abc2\" (UID: \"54de1caa-888f-433a-be5e-87b93932abc2\") " Feb 02 11:13:55 crc kubenswrapper[4838]: I0202 11:13:55.418727 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54de1caa-888f-433a-be5e-87b93932abc2-combined-ca-bundle\") pod \"54de1caa-888f-433a-be5e-87b93932abc2\" (UID: \"54de1caa-888f-433a-be5e-87b93932abc2\") " Feb 02 11:13:55 crc kubenswrapper[4838]: I0202 11:13:55.418776 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54de1caa-888f-433a-be5e-87b93932abc2-config-data\") pod \"54de1caa-888f-433a-be5e-87b93932abc2\" (UID: \"54de1caa-888f-433a-be5e-87b93932abc2\") " Feb 02 11:13:55 crc kubenswrapper[4838]: I0202 11:13:55.424360 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54de1caa-888f-433a-be5e-87b93932abc2-kube-api-access-ztctw" (OuterVolumeSpecName: "kube-api-access-ztctw") pod "54de1caa-888f-433a-be5e-87b93932abc2" (UID: "54de1caa-888f-433a-be5e-87b93932abc2"). InnerVolumeSpecName "kube-api-access-ztctw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:13:55 crc kubenswrapper[4838]: I0202 11:13:55.428371 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54de1caa-888f-433a-be5e-87b93932abc2-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "54de1caa-888f-433a-be5e-87b93932abc2" (UID: "54de1caa-888f-433a-be5e-87b93932abc2"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:13:55 crc kubenswrapper[4838]: I0202 11:13:55.445522 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54de1caa-888f-433a-be5e-87b93932abc2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "54de1caa-888f-433a-be5e-87b93932abc2" (UID: "54de1caa-888f-433a-be5e-87b93932abc2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:13:55 crc kubenswrapper[4838]: I0202 11:13:55.460835 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54de1caa-888f-433a-be5e-87b93932abc2-config-data" (OuterVolumeSpecName: "config-data") pod "54de1caa-888f-433a-be5e-87b93932abc2" (UID: "54de1caa-888f-433a-be5e-87b93932abc2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:13:55 crc kubenswrapper[4838]: I0202 11:13:55.520596 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ztctw\" (UniqueName: \"kubernetes.io/projected/54de1caa-888f-433a-be5e-87b93932abc2-kube-api-access-ztctw\") on node \"crc\" DevicePath \"\"" Feb 02 11:13:55 crc kubenswrapper[4838]: I0202 11:13:55.520647 4838 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/54de1caa-888f-433a-be5e-87b93932abc2-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:13:55 crc kubenswrapper[4838]: I0202 11:13:55.520658 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54de1caa-888f-433a-be5e-87b93932abc2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:13:55 crc kubenswrapper[4838]: I0202 11:13:55.520671 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54de1caa-888f-433a-be5e-87b93932abc2-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:13:56 crc kubenswrapper[4838]: I0202 11:13:56.109516 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-snvfg" event={"ID":"54de1caa-888f-433a-be5e-87b93932abc2","Type":"ContainerDied","Data":"a0042ff1a4615538f1e06137beb5f5fec92474e75ad9d142e41732bee581a2ec"} Feb 02 11:13:56 crc kubenswrapper[4838]: I0202 11:13:56.109925 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a0042ff1a4615538f1e06137beb5f5fec92474e75ad9d142e41732bee581a2ec" Feb 02 11:13:56 crc kubenswrapper[4838]: I0202 11:13:56.110043 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-snvfg" Feb 02 11:13:56 crc kubenswrapper[4838]: I0202 11:13:56.781816 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-pkmk8"] Feb 02 11:13:56 crc kubenswrapper[4838]: E0202 11:13:56.782439 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54de1caa-888f-433a-be5e-87b93932abc2" containerName="glance-db-sync" Feb 02 11:13:56 crc kubenswrapper[4838]: I0202 11:13:56.782453 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="54de1caa-888f-433a-be5e-87b93932abc2" containerName="glance-db-sync" Feb 02 11:13:56 crc kubenswrapper[4838]: I0202 11:13:56.782672 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="54de1caa-888f-433a-be5e-87b93932abc2" containerName="glance-db-sync" Feb 02 11:13:56 crc kubenswrapper[4838]: I0202 11:13:56.783438 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" Feb 02 11:13:56 crc kubenswrapper[4838]: I0202 11:13:56.800025 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-pkmk8"] Feb 02 11:13:56 crc kubenswrapper[4838]: I0202 11:13:56.943098 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-pkmk8\" (UID: \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" Feb 02 11:13:56 crc kubenswrapper[4838]: I0202 11:13:56.943136 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-pkmk8\" (UID: \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" Feb 02 11:13:56 crc kubenswrapper[4838]: I0202 11:13:56.943183 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-config\") pod \"dnsmasq-dns-785d8bcb8c-pkmk8\" (UID: \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" Feb 02 11:13:56 crc kubenswrapper[4838]: I0202 11:13:56.943205 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q88gq\" (UniqueName: \"kubernetes.io/projected/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-kube-api-access-q88gq\") pod \"dnsmasq-dns-785d8bcb8c-pkmk8\" (UID: \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" Feb 02 11:13:56 crc kubenswrapper[4838]: I0202 11:13:56.943240 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-pkmk8\" (UID: \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" Feb 02 11:13:56 crc kubenswrapper[4838]: I0202 11:13:56.943304 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-pkmk8\" (UID: 
\"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.045032 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-config\") pod \"dnsmasq-dns-785d8bcb8c-pkmk8\" (UID: \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.045091 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q88gq\" (UniqueName: \"kubernetes.io/projected/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-kube-api-access-q88gq\") pod \"dnsmasq-dns-785d8bcb8c-pkmk8\" (UID: \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.045131 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-pkmk8\" (UID: \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.045200 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-pkmk8\" (UID: \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.045243 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-pkmk8\" (UID: \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.045262 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-pkmk8\" (UID: \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.283833 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-pkmk8\" (UID: \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.284083 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-pkmk8\" (UID: \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.284002 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-config\") pod \"dnsmasq-dns-785d8bcb8c-pkmk8\" (UID: \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" Feb 02 11:13:57 crc 
kubenswrapper[4838]: I0202 11:13:57.283904 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-pkmk8\" (UID: \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.324971 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-pkmk8\" (UID: \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.326901 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q88gq\" (UniqueName: \"kubernetes.io/projected/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-kube-api-access-q88gq\") pod \"dnsmasq-dns-785d8bcb8c-pkmk8\" (UID: \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.408321 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.683002 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.684836 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.687654 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.687677 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-9mftv" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.690926 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.711450 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.861258 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " pod="openstack/glance-default-external-api-0" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.861342 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7ec80212-35c0-4405-937c-91af782b61c9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " pod="openstack/glance-default-external-api-0" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.861379 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ec80212-35c0-4405-937c-91af782b61c9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " 
pod="openstack/glance-default-external-api-0" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.861474 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfrnf\" (UniqueName: \"kubernetes.io/projected/7ec80212-35c0-4405-937c-91af782b61c9-kube-api-access-cfrnf\") pod \"glance-default-external-api-0\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " pod="openstack/glance-default-external-api-0" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.861545 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ec80212-35c0-4405-937c-91af782b61c9-scripts\") pod \"glance-default-external-api-0\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " pod="openstack/glance-default-external-api-0" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.861595 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ec80212-35c0-4405-937c-91af782b61c9-config-data\") pod \"glance-default-external-api-0\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " pod="openstack/glance-default-external-api-0" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.861744 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ec80212-35c0-4405-937c-91af782b61c9-logs\") pod \"glance-default-external-api-0\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " pod="openstack/glance-default-external-api-0" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.914928 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.916361 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.918582 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.922809 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.963722 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ec80212-35c0-4405-937c-91af782b61c9-logs\") pod \"glance-default-external-api-0\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " pod="openstack/glance-default-external-api-0" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.963793 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " pod="openstack/glance-default-external-api-0" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.963829 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7ec80212-35c0-4405-937c-91af782b61c9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " pod="openstack/glance-default-external-api-0" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.963854 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ec80212-35c0-4405-937c-91af782b61c9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " pod="openstack/glance-default-external-api-0" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.963905 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfrnf\" (UniqueName: \"kubernetes.io/projected/7ec80212-35c0-4405-937c-91af782b61c9-kube-api-access-cfrnf\") pod \"glance-default-external-api-0\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " pod="openstack/glance-default-external-api-0" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.963960 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ec80212-35c0-4405-937c-91af782b61c9-scripts\") pod \"glance-default-external-api-0\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " pod="openstack/glance-default-external-api-0" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.963996 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ec80212-35c0-4405-937c-91af782b61c9-config-data\") pod \"glance-default-external-api-0\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " pod="openstack/glance-default-external-api-0" Feb 02 11:13:57 crc kubenswrapper[4838]: I0202 11:13:57.964741 4838 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-external-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 
11:13:58.065339 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-logs\") pod \"glance-default-internal-api-0\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.065582 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.065739 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.065882 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.066017 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.066136 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.066259 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7b9x\" (UniqueName: \"kubernetes.io/projected/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-kube-api-access-m7b9x\") pod \"glance-default-internal-api-0\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.168215 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.168334 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:13:58 crc 
kubenswrapper[4838]: I0202 11:13:58.168392 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.168468 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.168506 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.168546 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7b9x\" (UniqueName: \"kubernetes.io/projected/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-kube-api-access-m7b9x\") pod \"glance-default-internal-api-0\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.168603 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-logs\") pod \"glance-default-internal-api-0\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.168608 4838 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.222839 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ec80212-35c0-4405-937c-91af782b61c9-logs\") pod \"glance-default-external-api-0\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " pod="openstack/glance-default-external-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.235964 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7ec80212-35c0-4405-937c-91af782b61c9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " pod="openstack/glance-default-external-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.236000 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.236479 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-logs\") pod \"glance-default-internal-api-0\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.240207 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ec80212-35c0-4405-937c-91af782b61c9-scripts\") pod \"glance-default-external-api-0\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " pod="openstack/glance-default-external-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.242384 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.243660 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ec80212-35c0-4405-937c-91af782b61c9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " pod="openstack/glance-default-external-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.244413 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ec80212-35c0-4405-937c-91af782b61c9-config-data\") pod \"glance-default-external-api-0\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " pod="openstack/glance-default-external-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.244707 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.246718 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.247571 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7b9x\" (UniqueName: \"kubernetes.io/projected/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-kube-api-access-m7b9x\") pod \"glance-default-internal-api-0\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.247667 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfrnf\" (UniqueName: \"kubernetes.io/projected/7ec80212-35c0-4405-937c-91af782b61c9-kube-api-access-cfrnf\") pod \"glance-default-external-api-0\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " pod="openstack/glance-default-external-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.256318 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: 
\"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.286508 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " pod="openstack/glance-default-external-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.311716 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 11:13:58 crc kubenswrapper[4838]: I0202 11:13:58.523765 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 11:13:59 crc kubenswrapper[4838]: I0202 11:13:59.809030 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 11:13:59 crc kubenswrapper[4838]: I0202 11:13:59.868203 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 11:14:08 crc kubenswrapper[4838]: E0202 11:14:08.493640 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ironic-conductor:current-podified" Feb 02 11:14:08 crc kubenswrapper[4838]: E0202 11:14:08.494354 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-ironic-conductor:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/container-scripts/init.sh],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:IronicPassword,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:osp-secret,},Key:IronicPassword,Optional:nil,},},},EnvVar{Name:PodName,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:PodNamespace,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:ProvisionNetwork,Value:,ValueFrom:nil,},EnvVar{Name:DatabaseHost,Value:openstack.openstack.svc,ValueFrom:nil,},EnvVar{Name:DatabaseName,Value:ironic,ValueFrom:nil,},EnvVar{Name:DeployHTTPURL,Value:,ValueFrom:nil,},EnvVar{Name:IngressDomain,Value:,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-merged,ReadOnly:false,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:etc-podinfo,ReadOnly:false,MountPath:/etc/podinfo,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil
,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rlw4f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-db-sync-z8sb2_openstack(04bf896a-e964-48a2-900e-44362394a6ac): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 11:14:08 crc kubenswrapper[4838]: E0202 11:14:08.495462 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ironic-db-sync-z8sb2" podUID="04bf896a-e964-48a2-900e-44362394a6ac" Feb 02 11:14:09 crc kubenswrapper[4838]: E0202 11:14:09.231914 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ironic-conductor:current-podified\\\"\"" pod="openstack/ironic-db-sync-z8sb2" podUID="04bf896a-e964-48a2-900e-44362394a6ac" Feb 02 11:14:09 crc kubenswrapper[4838]: I0202 11:14:09.384475 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-pkmk8"] Feb 02 11:14:09 crc kubenswrapper[4838]: I0202 11:14:09.443273 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-554c88cfc6-svbvn"] Feb 02 11:14:09 crc kubenswrapper[4838]: I0202 11:14:09.502602 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 11:14:09 crc kubenswrapper[4838]: I0202 11:14:09.641894 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 11:14:09 crc kubenswrapper[4838]: W0202 11:14:09.650516 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ec80212_35c0_4405_937c_91af782b61c9.slice/crio-2f74546df1a50b531c1a96ec5eaf575babfd1d7f914ae73efa47064c295f3942 WatchSource:0}: Error finding container 2f74546df1a50b531c1a96ec5eaf575babfd1d7f914ae73efa47064c295f3942: Status 404 returned error can't find the container with id 2f74546df1a50b531c1a96ec5eaf575babfd1d7f914ae73efa47064c295f3942 Feb 02 11:14:10 crc kubenswrapper[4838]: I0202 11:14:10.249798 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa","Type":"ContainerStarted","Data":"36ec0f7de028a92b1256a93d1db87b430bc5f989307e9c3d2b60c317e70ecdbd"} Feb 02 11:14:10 crc kubenswrapper[4838]: I0202 11:14:10.251345 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"7ec80212-35c0-4405-937c-91af782b61c9","Type":"ContainerStarted","Data":"2f74546df1a50b531c1a96ec5eaf575babfd1d7f914ae73efa47064c295f3942"} Feb 02 11:14:10 crc kubenswrapper[4838]: I0202 11:14:10.254250 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-554c88cfc6-svbvn" event={"ID":"04f65e34-3c92-4288-86f4-cfc67c46de23","Type":"ContainerStarted","Data":"c1126a49b396eb822b966bd15e0b58d63482fcb4479ef043d5d6c3aa2a71a43f"} Feb 02 11:14:10 crc kubenswrapper[4838]: I0202 11:14:10.254301 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-554c88cfc6-svbvn" event={"ID":"04f65e34-3c92-4288-86f4-cfc67c46de23","Type":"ContainerStarted","Data":"1f44e8f820369931bb46ae4ecea3854eba7c0927d81252e8be30560430423d23"} Feb 02 11:14:10 crc kubenswrapper[4838]: I0202 11:14:10.255227 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:14:10 crc kubenswrapper[4838]: I0202 11:14:10.269765 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"69335c29-7c9c-438d-ac8d-85141a4f9bb5","Type":"ContainerStarted","Data":"a2e6f8c7370409f7916d870010101c6f8924dea0dad807f3023768f11013ebba"} Feb 02 11:14:10 crc kubenswrapper[4838]: I0202 11:14:10.272367 4838 generic.go:334] "Generic (PLEG): container finished" podID="a0532d7b-d4a7-4021-9a42-8329a2ef50fc" containerID="2d44871bae04b179f8b9dabe190e8f8b63301ce7c00528361ff0c3d242f2b1ff" exitCode=0 Feb 02 11:14:10 crc kubenswrapper[4838]: I0202 11:14:10.272561 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" event={"ID":"a0532d7b-d4a7-4021-9a42-8329a2ef50fc","Type":"ContainerDied","Data":"2d44871bae04b179f8b9dabe190e8f8b63301ce7c00528361ff0c3d242f2b1ff"} Feb 02 11:14:10 crc kubenswrapper[4838]: I0202 11:14:10.272610 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" event={"ID":"a0532d7b-d4a7-4021-9a42-8329a2ef50fc","Type":"ContainerStarted","Data":"7739a2d6da279fe8145d94ef100cfb1fd055d0bf5462e7a9f8f6a32f6d36d8aa"} Feb 02 11:14:10 crc kubenswrapper[4838]: I0202 11:14:10.293758 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" event={"ID":"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce","Type":"ContainerStarted","Data":"59dac7f34e4b14b86296ead42a59d6f6e3f3b9fd93372b24781304406104890f"} Feb 02 11:14:10 crc kubenswrapper[4838]: I0202 11:14:10.315553 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-554c88cfc6-svbvn" podStartSLOduration=18.315530782 podStartE2EDuration="18.315530782s" podCreationTimestamp="2026-02-02 11:13:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:14:10.279546291 +0000 UTC m=+1244.616647349" watchObservedRunningTime="2026-02-02 11:14:10.315530782 +0000 UTC m=+1244.652631810" Feb 02 11:14:11 crc kubenswrapper[4838]: I0202 11:14:11.307332 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7ec80212-35c0-4405-937c-91af782b61c9","Type":"ContainerStarted","Data":"3c4a7d6864529ecca3982adb8848ba2efdc901f1d592afd6c97ad5de0421f0e1"} Feb 02 11:14:11 crc kubenswrapper[4838]: I0202 11:14:11.308071 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"7ec80212-35c0-4405-937c-91af782b61c9","Type":"ContainerStarted","Data":"96a4c1fcc8fb4b29643e695cf5383a6c3eae860fe1a1bf903fb35ed1a9aa174c"} Feb 02 11:14:11 crc kubenswrapper[4838]: I0202 11:14:11.307553 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="7ec80212-35c0-4405-937c-91af782b61c9" containerName="glance-httpd" containerID="cri-o://3c4a7d6864529ecca3982adb8848ba2efdc901f1d592afd6c97ad5de0421f0e1" gracePeriod=30 Feb 02 11:14:11 crc kubenswrapper[4838]: I0202 11:14:11.307482 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="7ec80212-35c0-4405-937c-91af782b61c9" containerName="glance-log" containerID="cri-o://96a4c1fcc8fb4b29643e695cf5383a6c3eae860fe1a1bf903fb35ed1a9aa174c" gracePeriod=30 Feb 02 11:14:11 crc kubenswrapper[4838]: I0202 11:14:11.310650 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" event={"ID":"a0532d7b-d4a7-4021-9a42-8329a2ef50fc","Type":"ContainerStarted","Data":"467e1f30e3e3bcb78002cccefd61abf3f2f4e06c3c4cf19e0fcb8f1f1aa09bf9"} Feb 02 11:14:11 crc kubenswrapper[4838]: I0202 11:14:11.310818 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" Feb 02 11:14:11 crc kubenswrapper[4838]: I0202 11:14:11.318935 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa","Type":"ContainerStarted","Data":"8257470b3cda351cad2c041b925b75e45fbd5d8085656c84a75d1486c30f843f"} Feb 02 11:14:11 crc kubenswrapper[4838]: I0202 11:14:11.336099 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=15.33607925 podStartE2EDuration="15.33607925s" podCreationTimestamp="2026-02-02 11:13:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:14:11.329271001 +0000 UTC m=+1245.666372039" watchObservedRunningTime="2026-02-02 11:14:11.33607925 +0000 UTC m=+1245.673180298" Feb 02 11:14:11 crc kubenswrapper[4838]: I0202 11:14:11.352399 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" podStartSLOduration=15.352382831 podStartE2EDuration="15.352382831s" podCreationTimestamp="2026-02-02 11:13:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:14:11.346682661 +0000 UTC m=+1245.683783739" watchObservedRunningTime="2026-02-02 11:14:11.352382831 +0000 UTC m=+1245.689483849" Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.333764 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa","Type":"ContainerStarted","Data":"c9ee5f989625536b01cf7a603e165ab3a01dd84280c5d8fe34a6f701e1b48d76"} Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.333852 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="f7c1a3b5-d164-4426-9fbb-1c5e50347cfa" containerName="glance-log" containerID="cri-o://8257470b3cda351cad2c041b925b75e45fbd5d8085656c84a75d1486c30f843f" gracePeriod=30 Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 
11:14:12.334050 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="f7c1a3b5-d164-4426-9fbb-1c5e50347cfa" containerName="glance-httpd" containerID="cri-o://c9ee5f989625536b01cf7a603e165ab3a01dd84280c5d8fe34a6f701e1b48d76" gracePeriod=30 Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.338893 4838 generic.go:334] "Generic (PLEG): container finished" podID="7ec80212-35c0-4405-937c-91af782b61c9" containerID="3c4a7d6864529ecca3982adb8848ba2efdc901f1d592afd6c97ad5de0421f0e1" exitCode=143 Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.338927 4838 generic.go:334] "Generic (PLEG): container finished" podID="7ec80212-35c0-4405-937c-91af782b61c9" containerID="96a4c1fcc8fb4b29643e695cf5383a6c3eae860fe1a1bf903fb35ed1a9aa174c" exitCode=143 Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.338976 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7ec80212-35c0-4405-937c-91af782b61c9","Type":"ContainerDied","Data":"3c4a7d6864529ecca3982adb8848ba2efdc901f1d592afd6c97ad5de0421f0e1"} Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.339027 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7ec80212-35c0-4405-937c-91af782b61c9","Type":"ContainerDied","Data":"96a4c1fcc8fb4b29643e695cf5383a6c3eae860fe1a1bf903fb35ed1a9aa174c"} Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.368431 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=16.36841199 podStartE2EDuration="16.36841199s" podCreationTimestamp="2026-02-02 11:13:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:14:12.362668029 +0000 UTC m=+1246.699769097" watchObservedRunningTime="2026-02-02 11:14:12.36841199 +0000 UTC m=+1246.705513018" Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.708432 4838 util.go:48] "No ready sandbox for pod can be found. 
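
The two "Killing container with a grace period" entries above (gracePeriod=30), followed by exitCode=143 for both glance containers, show the standard termination handshake: the runtime delivers SIGTERM, waits up to the grace period, and only escalates to SIGKILL if the process lingers; 143 = 128 + 15 (SIGTERM) in the container exit-code convention. A minimal Go sketch of that handshake (illustrative only, not kubelet source; "sleep" stands in for the container process):

    package main

    import (
        "fmt"
        "os/exec"
        "syscall"
        "time"
    )

    // stopWithGrace sends SIGTERM, waits out the grace period, then SIGKILLs.
    func stopWithGrace(cmd *exec.Cmd, grace time.Duration) error {
        // SIGTERM first; a process that exits now is reported by container
        // runtimes as 128+15 = 143, matching the exitCode=143 entries above.
        if err := cmd.Process.Signal(syscall.SIGTERM); err != nil {
            return err
        }
        done := make(chan error, 1)
        go func() { done <- cmd.Wait() }()
        select {
        case err := <-done:
            return err // exited within the grace period
        case <-time.After(grace):
            return cmd.Process.Kill() // grace period expired: SIGKILL
        }
    }

    func main() {
        cmd := exec.Command("sleep", "300")
        if err := cmd.Start(); err != nil {
            panic(err)
        }
        fmt.Println("stopped:", stopWithGrace(cmd, 30*time.Second))
    }
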
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.815013 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ec80212-35c0-4405-937c-91af782b61c9-scripts\") pod \"7ec80212-35c0-4405-937c-91af782b61c9\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.815153 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7ec80212-35c0-4405-937c-91af782b61c9-httpd-run\") pod \"7ec80212-35c0-4405-937c-91af782b61c9\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.815184 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"7ec80212-35c0-4405-937c-91af782b61c9\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.815211 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfrnf\" (UniqueName: \"kubernetes.io/projected/7ec80212-35c0-4405-937c-91af782b61c9-kube-api-access-cfrnf\") pod \"7ec80212-35c0-4405-937c-91af782b61c9\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.815236 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ec80212-35c0-4405-937c-91af782b61c9-config-data\") pod \"7ec80212-35c0-4405-937c-91af782b61c9\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.815263 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ec80212-35c0-4405-937c-91af782b61c9-logs\") pod \"7ec80212-35c0-4405-937c-91af782b61c9\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.815310 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ec80212-35c0-4405-937c-91af782b61c9-combined-ca-bundle\") pod \"7ec80212-35c0-4405-937c-91af782b61c9\" (UID: \"7ec80212-35c0-4405-937c-91af782b61c9\") " Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.816287 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ec80212-35c0-4405-937c-91af782b61c9-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "7ec80212-35c0-4405-937c-91af782b61c9" (UID: "7ec80212-35c0-4405-937c-91af782b61c9"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.816296 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ec80212-35c0-4405-937c-91af782b61c9-logs" (OuterVolumeSpecName: "logs") pod "7ec80212-35c0-4405-937c-91af782b61c9" (UID: "7ec80212-35c0-4405-937c-91af782b61c9"). InnerVolumeSpecName "logs". 
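
The run of "operationExecutor.UnmountVolume started" entries above is the kubelet volume manager's reconciler: it repeatedly diffs the desired world (volumes that pods scheduled here still need) against the actual world (volumes currently mounted) and issues mount or unmount operations for the difference. A toy version of that diff loop (illustrative only; the types are invented, not the kubelet's):

    package main

    import "fmt"

    type volumeKey struct{ podUID, volume string }

    // reconcile emits an operation for every mismatch between the two worlds.
    func reconcile(desired, actual map[volumeKey]bool) {
        for k := range actual {
            if !desired[k] { // mounted but no longer wanted
                fmt.Printf("UnmountVolume started for volume %q pod %q\n", k.volume, k.podUID)
            }
        }
        for k := range desired {
            if !actual[k] { // wanted but not yet mounted
                fmt.Printf("MountVolume started for volume %q pod %q\n", k.volume, k.podUID)
            }
        }
    }

    func main() {
        gone := volumeKey{podUID: "7ec80212-35c0-4405-937c-91af782b61c9", volume: "config-data"}
        actual := map[volumeKey]bool{gone: true}
        desired := map[volumeKey]bool{} // the pod was deleted: nothing desired
        reconcile(desired, actual)
    }
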
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.824074 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "7ec80212-35c0-4405-937c-91af782b61c9" (UID: "7ec80212-35c0-4405-937c-91af782b61c9"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.825019 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ec80212-35c0-4405-937c-91af782b61c9-scripts" (OuterVolumeSpecName: "scripts") pod "7ec80212-35c0-4405-937c-91af782b61c9" (UID: "7ec80212-35c0-4405-937c-91af782b61c9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.828040 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ec80212-35c0-4405-937c-91af782b61c9-kube-api-access-cfrnf" (OuterVolumeSpecName: "kube-api-access-cfrnf") pod "7ec80212-35c0-4405-937c-91af782b61c9" (UID: "7ec80212-35c0-4405-937c-91af782b61c9"). InnerVolumeSpecName "kube-api-access-cfrnf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.844868 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ec80212-35c0-4405-937c-91af782b61c9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7ec80212-35c0-4405-937c-91af782b61c9" (UID: "7ec80212-35c0-4405-937c-91af782b61c9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.877571 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ec80212-35c0-4405-937c-91af782b61c9-config-data" (OuterVolumeSpecName: "config-data") pod "7ec80212-35c0-4405-937c-91af782b61c9" (UID: "7ec80212-35c0-4405-937c-91af782b61c9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.917556 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ec80212-35c0-4405-937c-91af782b61c9-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.917942 4838 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7ec80212-35c0-4405-937c-91af782b61c9-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.917980 4838 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.917992 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfrnf\" (UniqueName: \"kubernetes.io/projected/7ec80212-35c0-4405-937c-91af782b61c9-kube-api-access-cfrnf\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.918003 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ec80212-35c0-4405-937c-91af782b61c9-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.918011 4838 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ec80212-35c0-4405-937c-91af782b61c9-logs\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.918022 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ec80212-35c0-4405-937c-91af782b61c9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:12 crc kubenswrapper[4838]: I0202 11:14:12.935073 4838 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.018951 4838 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.348765 4838 generic.go:334] "Generic (PLEG): container finished" podID="f7c1a3b5-d164-4426-9fbb-1c5e50347cfa" containerID="c9ee5f989625536b01cf7a603e165ab3a01dd84280c5d8fe34a6f701e1b48d76" exitCode=0 Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.348803 4838 generic.go:334] "Generic (PLEG): container finished" podID="f7c1a3b5-d164-4426-9fbb-1c5e50347cfa" containerID="8257470b3cda351cad2c041b925b75e45fbd5d8085656c84a75d1486c30f843f" exitCode=143 Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.348819 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa","Type":"ContainerDied","Data":"c9ee5f989625536b01cf7a603e165ab3a01dd84280c5d8fe34a6f701e1b48d76"} Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.348880 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa","Type":"ContainerDied","Data":"8257470b3cda351cad2c041b925b75e45fbd5d8085656c84a75d1486c30f843f"} Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.353090 
4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7ec80212-35c0-4405-937c-91af782b61c9","Type":"ContainerDied","Data":"2f74546df1a50b531c1a96ec5eaf575babfd1d7f914ae73efa47064c295f3942"} Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.353216 4838 scope.go:117] "RemoveContainer" containerID="3c4a7d6864529ecca3982adb8848ba2efdc901f1d592afd6c97ad5de0421f0e1" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.353220 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.389066 4838 scope.go:117] "RemoveContainer" containerID="96a4c1fcc8fb4b29643e695cf5383a6c3eae860fe1a1bf903fb35ed1a9aa174c" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.395020 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.407801 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.424555 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 11:14:13 crc kubenswrapper[4838]: E0202 11:14:13.424967 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ec80212-35c0-4405-937c-91af782b61c9" containerName="glance-httpd" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.424984 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ec80212-35c0-4405-937c-91af782b61c9" containerName="glance-httpd" Feb 02 11:14:13 crc kubenswrapper[4838]: E0202 11:14:13.425004 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ec80212-35c0-4405-937c-91af782b61c9" containerName="glance-log" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.425009 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ec80212-35c0-4405-937c-91af782b61c9" containerName="glance-log" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.425165 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ec80212-35c0-4405-937c-91af782b61c9" containerName="glance-httpd" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.425188 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ec80212-35c0-4405-937c-91af782b61c9" containerName="glance-log" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.426066 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.432407 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.438453 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.438807 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.510999 4838 util.go:48] "No ready sandbox for pod can be found. 
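
Just above, the replacement pod is admitted only after the old pod's UID is scrubbed from resource-manager state ("RemoveStaleState", "Deleted CPUSet assignment"). Loosely, this is garbage collection keyed on pod UID: any per-container assignment whose pod no longer exists is dropped, as in this sketch (invented types, for illustration):

    package main

    import "fmt"

    type assignment struct{ podUID, container string }

    // removeStaleState drops assignments whose pod UID is no longer live.
    func removeStaleState(state map[assignment]string, livePods map[string]bool) {
        for a := range state {
            if !livePods[a.podUID] {
                fmt.Printf("RemoveStaleState: removing container %q of pod %q\n", a.container, a.podUID)
                delete(state, a)
            }
        }
    }

    func main() {
        state := map[assignment]string{
            {podUID: "7ec80212-35c0-4405-937c-91af782b61c9", container: "glance-httpd"}: "cpuset 0-1",
        }
        removeStaleState(state, map[string]bool{}) // old UID is gone
        fmt.Println("remaining assignments:", len(state))
    }
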
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.526446 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") " pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.526700 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-scripts\") pod \"glance-default-external-api-0\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") " pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.526837 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") " pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.526923 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-config-data\") pod \"glance-default-external-api-0\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") " pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.527141 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-logs\") pod \"glance-default-external-api-0\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") " pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.527226 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltvvb\" (UniqueName: \"kubernetes.io/projected/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-kube-api-access-ltvvb\") pod \"glance-default-external-api-0\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") " pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.527342 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") " pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.527410 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") " pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.628576 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-logs\") pod 
\"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.628798 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-combined-ca-bundle\") pod \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.628841 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.628930 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-scripts\") pod \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.628980 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-httpd-run\") pod \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.629069 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m7b9x\" (UniqueName: \"kubernetes.io/projected/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-kube-api-access-m7b9x\") pod \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.629116 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-config-data\") pod \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\" (UID: \"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa\") " Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.629240 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-logs" (OuterVolumeSpecName: "logs") pod "f7c1a3b5-d164-4426-9fbb-1c5e50347cfa" (UID: "f7c1a3b5-d164-4426-9fbb-1c5e50347cfa"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.629370 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-scripts\") pod \"glance-default-external-api-0\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") " pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.629422 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") " pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.629457 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "f7c1a3b5-d164-4426-9fbb-1c5e50347cfa" (UID: "f7c1a3b5-d164-4426-9fbb-1c5e50347cfa"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.629465 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-config-data\") pod \"glance-default-external-api-0\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") " pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.629777 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-logs\") pod \"glance-default-external-api-0\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") " pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.629817 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltvvb\" (UniqueName: \"kubernetes.io/projected/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-kube-api-access-ltvvb\") pod \"glance-default-external-api-0\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") " pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.629931 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") " pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.629967 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") " pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.630052 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") " pod="openstack/glance-default-external-api-0" Feb 
02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.630113 4838 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.630124 4838 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-logs\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.630606 4838 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.630929 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-logs\") pod \"glance-default-external-api-0\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") " pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.631269 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") " pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.636777 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") " pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.639611 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-kube-api-access-m7b9x" (OuterVolumeSpecName: "kube-api-access-m7b9x") pod "f7c1a3b5-d164-4426-9fbb-1c5e50347cfa" (UID: "f7c1a3b5-d164-4426-9fbb-1c5e50347cfa"). InnerVolumeSpecName "kube-api-access-m7b9x". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.640643 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-scripts\") pod \"glance-default-external-api-0\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") " pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.644374 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "f7c1a3b5-d164-4426-9fbb-1c5e50347cfa" (UID: "f7c1a3b5-d164-4426-9fbb-1c5e50347cfa"). InnerVolumeSpecName "local-storage01-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.645672 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") " pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.647788 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-scripts" (OuterVolumeSpecName: "scripts") pod "f7c1a3b5-d164-4426-9fbb-1c5e50347cfa" (UID: "f7c1a3b5-d164-4426-9fbb-1c5e50347cfa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.649481 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltvvb\" (UniqueName: \"kubernetes.io/projected/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-kube-api-access-ltvvb\") pod \"glance-default-external-api-0\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") " pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.649755 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-config-data\") pod \"glance-default-external-api-0\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") " pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.668060 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f7c1a3b5-d164-4426-9fbb-1c5e50347cfa" (UID: "f7c1a3b5-d164-4426-9fbb-1c5e50347cfa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.673711 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") " pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.689232 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-config-data" (OuterVolumeSpecName: "config-data") pod "f7c1a3b5-d164-4426-9fbb-1c5e50347cfa" (UID: "f7c1a3b5-d164-4426-9fbb-1c5e50347cfa"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.734679 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m7b9x\" (UniqueName: \"kubernetes.io/projected/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-kube-api-access-m7b9x\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.734795 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.734853 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.734944 4838 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.734968 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.769544 4838 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.807123 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 11:14:13 crc kubenswrapper[4838]: I0202 11:14:13.837536 4838 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.353529 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.369296 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f7c1a3b5-d164-4426-9fbb-1c5e50347cfa","Type":"ContainerDied","Data":"36ec0f7de028a92b1256a93d1db87b430bc5f989307e9c3d2b60c317e70ecdbd"} Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.369654 4838 scope.go:117] "RemoveContainer" containerID="c9ee5f989625536b01cf7a603e165ab3a01dd84280c5d8fe34a6f701e1b48d76" Feb 02 11:14:14 crc kubenswrapper[4838]: W0202 11:14:14.369548 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb3adf1bb_98e2_4ab3_bd65_ac4b7b32c693.slice/crio-86a5e85813365269f41d680d587b76f662614b4771f4a557e8318cddf2156f77 WatchSource:0}: Error finding container 86a5e85813365269f41d680d587b76f662614b4771f4a557e8318cddf2156f77: Status 404 returned error can't find the container with id 86a5e85813365269f41d680d587b76f662614b4771f4a557e8318cddf2156f77 Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.369602 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.391039 4838 scope.go:117] "RemoveContainer" containerID="8257470b3cda351cad2c041b925b75e45fbd5d8085656c84a75d1486c30f843f" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.415013 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.441563 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.450423 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 11:14:14 crc kubenswrapper[4838]: E0202 11:14:14.450764 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7c1a3b5-d164-4426-9fbb-1c5e50347cfa" containerName="glance-httpd" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.450780 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7c1a3b5-d164-4426-9fbb-1c5e50347cfa" containerName="glance-httpd" Feb 02 11:14:14 crc kubenswrapper[4838]: E0202 11:14:14.450794 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7c1a3b5-d164-4426-9fbb-1c5e50347cfa" containerName="glance-log" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.450800 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7c1a3b5-d164-4426-9fbb-1c5e50347cfa" containerName="glance-log" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.450960 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7c1a3b5-d164-4426-9fbb-1c5e50347cfa" containerName="glance-log" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.450986 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7c1a3b5-d164-4426-9fbb-1c5e50347cfa" containerName="glance-httpd" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.451849 4838 util.go:30] "No sandbox for pod can be found. 
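
The DELETE/REMOVE/ADD sequence above is the pod returning under the same name (glance-default-internal-api-0) but a brand-new UID (785718c1-d706-4e71-8250-77f8326207d6 replacing f7c1a3b5-d164-4426-9fbb-1c5e50347cfa). The kubelet keys all per-pod state by UID, which is why the "Cleaned up orphaned pod volumes dir" entries just below sweep the old UID's directory. The path convention, visible in the log itself:

    package main

    import (
        "fmt"
        "path/filepath"
    )

    // volumesDir builds the per-UID volumes directory the cleanup entries refer to.
    func volumesDir(podUID string) string {
        return filepath.Join("/var/lib/kubelet/pods", podUID, "volumes")
    }

    func main() {
        oldUID := "f7c1a3b5-d164-4426-9fbb-1c5e50347cfa" // previous incarnation, swept
        newUID := "785718c1-d706-4e71-8250-77f8326207d6" // replacement pod
        fmt.Println("orphaned:", volumesDir(oldUID))
        fmt.Println("active:  ", volumesDir(newUID))
    }
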
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.454251 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.454501 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.465089 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.519421 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ec80212-35c0-4405-937c-91af782b61c9" path="/var/lib/kubelet/pods/7ec80212-35c0-4405-937c-91af782b61c9/volumes" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.520895 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7c1a3b5-d164-4426-9fbb-1c5e50347cfa" path="/var/lib/kubelet/pods/f7c1a3b5-d164-4426-9fbb-1c5e50347cfa/volumes" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.556911 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/785718c1-d706-4e71-8250-77f8326207d6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.556982 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.557059 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/785718c1-d706-4e71-8250-77f8326207d6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.557095 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/785718c1-d706-4e71-8250-77f8326207d6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.557122 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjp6h\" (UniqueName: \"kubernetes.io/projected/785718c1-d706-4e71-8250-77f8326207d6-kube-api-access-hjp6h\") pod \"glance-default-internal-api-0\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.558077 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/785718c1-d706-4e71-8250-77f8326207d6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc 
kubenswrapper[4838]: I0202 11:14:14.558279 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/785718c1-d706-4e71-8250-77f8326207d6-logs\") pod \"glance-default-internal-api-0\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.558343 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/785718c1-d706-4e71-8250-77f8326207d6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.660283 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/785718c1-d706-4e71-8250-77f8326207d6-logs\") pod \"glance-default-internal-api-0\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.660661 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/785718c1-d706-4e71-8250-77f8326207d6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.660720 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/785718c1-d706-4e71-8250-77f8326207d6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.660761 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.660845 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/785718c1-d706-4e71-8250-77f8326207d6-logs\") pod \"glance-default-internal-api-0\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.660845 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/785718c1-d706-4e71-8250-77f8326207d6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.660907 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/785718c1-d706-4e71-8250-77f8326207d6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.660941 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-hjp6h\" (UniqueName: \"kubernetes.io/projected/785718c1-d706-4e71-8250-77f8326207d6-kube-api-access-hjp6h\") pod \"glance-default-internal-api-0\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.660987 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/785718c1-d706-4e71-8250-77f8326207d6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.661028 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/785718c1-d706-4e71-8250-77f8326207d6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.663035 4838 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.667308 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/785718c1-d706-4e71-8250-77f8326207d6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.668017 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/785718c1-d706-4e71-8250-77f8326207d6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.674927 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/785718c1-d706-4e71-8250-77f8326207d6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.677903 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjp6h\" (UniqueName: \"kubernetes.io/projected/785718c1-d706-4e71-8250-77f8326207d6-kube-api-access-hjp6h\") pod \"glance-default-internal-api-0\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.678647 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/785718c1-d706-4e71-8250-77f8326207d6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.695790 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:14:14 crc kubenswrapper[4838]: I0202 11:14:14.835083 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 11:14:15 crc kubenswrapper[4838]: I0202 11:14:15.384558 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693","Type":"ContainerStarted","Data":"3d0181f005eaa235a74d3c353269708285f0948e183cd37a5b3b101c16555cc3"} Feb 02 11:14:15 crc kubenswrapper[4838]: I0202 11:14:15.384942 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693","Type":"ContainerStarted","Data":"86a5e85813365269f41d680d587b76f662614b4771f4a557e8318cddf2156f77"} Feb 02 11:14:15 crc kubenswrapper[4838]: I0202 11:14:15.612048 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 11:14:17 crc kubenswrapper[4838]: I0202 11:14:17.410913 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" Feb 02 11:14:17 crc kubenswrapper[4838]: I0202 11:14:17.462498 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-4npgg"] Feb 02 11:14:17 crc kubenswrapper[4838]: I0202 11:14:17.462783 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg" podUID="11d5db5b-3fc8-41a3-b60d-69391b17f4f5" containerName="dnsmasq-dns" containerID="cri-o://19d0d2218649d8d7f2f565c9f5139eb1acfb1ae0acae69a947e66543e1ce4dfd" gracePeriod=10 Feb 02 11:14:18 crc kubenswrapper[4838]: I0202 11:14:18.428092 4838 generic.go:334] "Generic (PLEG): container finished" podID="11d5db5b-3fc8-41a3-b60d-69391b17f4f5" containerID="19d0d2218649d8d7f2f565c9f5139eb1acfb1ae0acae69a947e66543e1ce4dfd" exitCode=0 Feb 02 11:14:18 crc kubenswrapper[4838]: I0202 11:14:18.428133 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg" event={"ID":"11d5db5b-3fc8-41a3-b60d-69391b17f4f5","Type":"ContainerDied","Data":"19d0d2218649d8d7f2f565c9f5139eb1acfb1ae0acae69a947e66543e1ce4dfd"} Feb 02 11:14:20 crc kubenswrapper[4838]: I0202 11:14:20.485360 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg" podUID="11d5db5b-3fc8-41a3-b60d-69391b17f4f5" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.142:5353: connect: connection refused" Feb 02 11:14:22 crc kubenswrapper[4838]: I0202 11:14:22.461692 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"785718c1-d706-4e71-8250-77f8326207d6","Type":"ContainerStarted","Data":"a9f6b25aac1fa0b6a3b76db6ae57f4fd65bcb77fb7f59bd4334ebf84b155852c"} Feb 02 11:14:22 crc kubenswrapper[4838]: I0202 11:14:22.790706 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg" Feb 02 11:14:22 crc kubenswrapper[4838]: I0202 11:14:22.915196 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-dns-swift-storage-0\") pod \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\" (UID: \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\") " Feb 02 11:14:22 crc kubenswrapper[4838]: I0202 11:14:22.915311 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ttqzg\" (UniqueName: \"kubernetes.io/projected/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-kube-api-access-ttqzg\") pod \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\" (UID: \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\") " Feb 02 11:14:22 crc kubenswrapper[4838]: I0202 11:14:22.915448 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-ovsdbserver-nb\") pod \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\" (UID: \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\") " Feb 02 11:14:22 crc kubenswrapper[4838]: I0202 11:14:22.915500 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-dns-svc\") pod \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\" (UID: \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\") " Feb 02 11:14:22 crc kubenswrapper[4838]: I0202 11:14:22.915552 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-ovsdbserver-sb\") pod \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\" (UID: \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\") " Feb 02 11:14:22 crc kubenswrapper[4838]: I0202 11:14:22.915584 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-config\") pod \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\" (UID: \"11d5db5b-3fc8-41a3-b60d-69391b17f4f5\") " Feb 02 11:14:22 crc kubenswrapper[4838]: I0202 11:14:22.929745 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-kube-api-access-ttqzg" (OuterVolumeSpecName: "kube-api-access-ttqzg") pod "11d5db5b-3fc8-41a3-b60d-69391b17f4f5" (UID: "11d5db5b-3fc8-41a3-b60d-69391b17f4f5"). InnerVolumeSpecName "kube-api-access-ttqzg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:14:22 crc kubenswrapper[4838]: I0202 11:14:22.980632 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "11d5db5b-3fc8-41a3-b60d-69391b17f4f5" (UID: "11d5db5b-3fc8-41a3-b60d-69391b17f4f5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:14:22 crc kubenswrapper[4838]: I0202 11:14:22.981344 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "11d5db5b-3fc8-41a3-b60d-69391b17f4f5" (UID: "11d5db5b-3fc8-41a3-b60d-69391b17f4f5"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:14:22 crc kubenswrapper[4838]: I0202 11:14:22.993390 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-config" (OuterVolumeSpecName: "config") pod "11d5db5b-3fc8-41a3-b60d-69391b17f4f5" (UID: "11d5db5b-3fc8-41a3-b60d-69391b17f4f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:14:23 crc kubenswrapper[4838]: I0202 11:14:23.003806 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "11d5db5b-3fc8-41a3-b60d-69391b17f4f5" (UID: "11d5db5b-3fc8-41a3-b60d-69391b17f4f5"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:14:23 crc kubenswrapper[4838]: I0202 11:14:23.017964 4838 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:23 crc kubenswrapper[4838]: I0202 11:14:23.018005 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-config\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:23 crc kubenswrapper[4838]: I0202 11:14:23.018016 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ttqzg\" (UniqueName: \"kubernetes.io/projected/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-kube-api-access-ttqzg\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:23 crc kubenswrapper[4838]: I0202 11:14:23.018028 4838 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:23 crc kubenswrapper[4838]: I0202 11:14:23.018037 4838 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:23 crc kubenswrapper[4838]: I0202 11:14:23.022731 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "11d5db5b-3fc8-41a3-b60d-69391b17f4f5" (UID: "11d5db5b-3fc8-41a3-b60d-69391b17f4f5"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:14:23 crc kubenswrapper[4838]: I0202 11:14:23.120267 4838 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/11d5db5b-3fc8-41a3-b60d-69391b17f4f5-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:23 crc kubenswrapper[4838]: I0202 11:14:23.474434 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg" event={"ID":"11d5db5b-3fc8-41a3-b60d-69391b17f4f5","Type":"ContainerDied","Data":"95702c7397a388b0f84882fd20c58fd1eb623157ed2dd90f2903f0f5327f1800"} Feb 02 11:14:23 crc kubenswrapper[4838]: I0202 11:14:23.474509 4838 scope.go:117] "RemoveContainer" containerID="19d0d2218649d8d7f2f565c9f5139eb1acfb1ae0acae69a947e66543e1ce4dfd" Feb 02 11:14:23 crc kubenswrapper[4838]: I0202 11:14:23.474521 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-4npgg" Feb 02 11:14:23 crc kubenswrapper[4838]: I0202 11:14:23.519267 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-4npgg"] Feb 02 11:14:23 crc kubenswrapper[4838]: I0202 11:14:23.530974 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-4npgg"] Feb 02 11:14:24 crc kubenswrapper[4838]: I0202 11:14:24.322391 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-554c88cfc6-svbvn" Feb 02 11:14:24 crc kubenswrapper[4838]: I0202 11:14:24.485260 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693","Type":"ContainerStarted","Data":"7d1e6d3b0d12ec5eacfcb88d9ce1001917c904b0a7bd5656688f8bef53536b25"} Feb 02 11:14:24 crc kubenswrapper[4838]: I0202 11:14:24.489316 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"785718c1-d706-4e71-8250-77f8326207d6","Type":"ContainerStarted","Data":"cd8ce6306c3129ac1384b3de5bee70d6589d86293795dd91eaabda36bf5587e2"} Feb 02 11:14:24 crc kubenswrapper[4838]: I0202 11:14:24.517850 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11d5db5b-3fc8-41a3-b60d-69391b17f4f5" path="/var/lib/kubelet/pods/11d5db5b-3fc8-41a3-b60d-69391b17f4f5/volumes" Feb 02 11:14:25 crc kubenswrapper[4838]: I0202 11:14:25.535284 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=12.535268621 podStartE2EDuration="12.535268621s" podCreationTimestamp="2026-02-02 11:14:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:14:25.53029923 +0000 UTC m=+1259.867400308" watchObservedRunningTime="2026-02-02 11:14:25.535268621 +0000 UTC m=+1259.872369649" Feb 02 11:14:26 crc kubenswrapper[4838]: I0202 11:14:26.095583 4838 scope.go:117] "RemoveContainer" containerID="909195a472f294640b13ec43b6c1a4446157fb76ee0bc42d640f3a0f126f57c0" Feb 02 11:14:26 crc kubenswrapper[4838]: E0202 11:14:26.393770 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="69335c29-7c9c-438d-ac8d-85141a4f9bb5" Feb 02 11:14:26 crc kubenswrapper[4838]: I0202 
11:14:26.525478 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="69335c29-7c9c-438d-ac8d-85141a4f9bb5" containerName="ceilometer-notification-agent" containerID="cri-o://a84e0ce2d735e4167f4606dd17d211b1dbb2be9fa7f998fc51fe43fc67059f1a" gracePeriod=30 Feb 02 11:14:26 crc kubenswrapper[4838]: I0202 11:14:26.525515 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="69335c29-7c9c-438d-ac8d-85141a4f9bb5" containerName="proxy-httpd" containerID="cri-o://ddbe10a98f03864716e5f3d91d0bdf6c60bcf3d9adcb70a1a6594985f9928ef4" gracePeriod=30 Feb 02 11:14:26 crc kubenswrapper[4838]: I0202 11:14:26.525564 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="69335c29-7c9c-438d-ac8d-85141a4f9bb5" containerName="sg-core" containerID="cri-o://a2e6f8c7370409f7916d870010101c6f8924dea0dad807f3023768f11013ebba" gracePeriod=30 Feb 02 11:14:26 crc kubenswrapper[4838]: I0202 11:14:26.527529 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 11:14:26 crc kubenswrapper[4838]: I0202 11:14:26.527585 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"69335c29-7c9c-438d-ac8d-85141a4f9bb5","Type":"ContainerStarted","Data":"ddbe10a98f03864716e5f3d91d0bdf6c60bcf3d9adcb70a1a6594985f9928ef4"} Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.177545 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Feb 02 11:14:27 crc kubenswrapper[4838]: E0202 11:14:27.178120 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11d5db5b-3fc8-41a3-b60d-69391b17f4f5" containerName="init" Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.178140 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="11d5db5b-3fc8-41a3-b60d-69391b17f4f5" containerName="init" Feb 02 11:14:27 crc kubenswrapper[4838]: E0202 11:14:27.178166 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11d5db5b-3fc8-41a3-b60d-69391b17f4f5" containerName="dnsmasq-dns" Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.178174 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="11d5db5b-3fc8-41a3-b60d-69391b17f4f5" containerName="dnsmasq-dns" Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.178341 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="11d5db5b-3fc8-41a3-b60d-69391b17f4f5" containerName="dnsmasq-dns" Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.178900 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.182960 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.183275 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.183898 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-wh7gm" Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.196815 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.302837 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406-combined-ca-bundle\") pod \"openstackclient\" (UID: \"7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406\") " pod="openstack/openstackclient" Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.302886 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9m22z\" (UniqueName: \"kubernetes.io/projected/7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406-kube-api-access-9m22z\") pod \"openstackclient\" (UID: \"7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406\") " pod="openstack/openstackclient" Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.302917 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406-openstack-config\") pod \"openstackclient\" (UID: \"7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406\") " pod="openstack/openstackclient" Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.303118 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406-openstack-config-secret\") pod \"openstackclient\" (UID: \"7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406\") " pod="openstack/openstackclient" Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.404869 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406-combined-ca-bundle\") pod \"openstackclient\" (UID: \"7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406\") " pod="openstack/openstackclient" Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.404926 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9m22z\" (UniqueName: \"kubernetes.io/projected/7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406-kube-api-access-9m22z\") pod \"openstackclient\" (UID: \"7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406\") " pod="openstack/openstackclient" Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.404957 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406-openstack-config\") pod \"openstackclient\" (UID: \"7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406\") " pod="openstack/openstackclient" Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.405010 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406-openstack-config-secret\") pod \"openstackclient\" (UID: \"7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406\") " pod="openstack/openstackclient" Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.406278 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406-openstack-config\") pod \"openstackclient\" (UID: \"7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406\") " pod="openstack/openstackclient" Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.411239 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406-combined-ca-bundle\") pod \"openstackclient\" (UID: \"7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406\") " pod="openstack/openstackclient" Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.411300 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406-openstack-config-secret\") pod \"openstackclient\" (UID: \"7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406\") " pod="openstack/openstackclient" Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.425046 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9m22z\" (UniqueName: \"kubernetes.io/projected/7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406-kube-api-access-9m22z\") pod \"openstackclient\" (UID: \"7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406\") " pod="openstack/openstackclient" Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.497581 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.560379 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"785718c1-d706-4e71-8250-77f8326207d6","Type":"ContainerStarted","Data":"cb31ed3f580613dfeda4c2802d70297df8ef3ac8bcbeeb274398e529a1d999be"} Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.570742 4838 generic.go:334] "Generic (PLEG): container finished" podID="69335c29-7c9c-438d-ac8d-85141a4f9bb5" containerID="ddbe10a98f03864716e5f3d91d0bdf6c60bcf3d9adcb70a1a6594985f9928ef4" exitCode=0 Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.570830 4838 generic.go:334] "Generic (PLEG): container finished" podID="69335c29-7c9c-438d-ac8d-85141a4f9bb5" containerID="a2e6f8c7370409f7916d870010101c6f8924dea0dad807f3023768f11013ebba" exitCode=2 Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.570846 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"69335c29-7c9c-438d-ac8d-85141a4f9bb5","Type":"ContainerDied","Data":"ddbe10a98f03864716e5f3d91d0bdf6c60bcf3d9adcb70a1a6594985f9928ef4"} Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.570923 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"69335c29-7c9c-438d-ac8d-85141a4f9bb5","Type":"ContainerDied","Data":"a2e6f8c7370409f7916d870010101c6f8924dea0dad807f3023768f11013ebba"} Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.574820 4838 generic.go:334] "Generic (PLEG): container finished" podID="04bf896a-e964-48a2-900e-44362394a6ac" containerID="41eabf14009d9af258d1c5bda9d7061a8d67f6423b97d75aabeae0f07cb57da0" exitCode=0 Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.574852 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-db-sync-z8sb2" event={"ID":"04bf896a-e964-48a2-900e-44362394a6ac","Type":"ContainerDied","Data":"41eabf14009d9af258d1c5bda9d7061a8d67f6423b97d75aabeae0f07cb57da0"} Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.590821 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=13.59080446 podStartE2EDuration="13.59080446s" podCreationTimestamp="2026-02-02 11:14:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:14:27.590240825 +0000 UTC m=+1261.927341903" watchObservedRunningTime="2026-02-02 11:14:27.59080446 +0000 UTC m=+1261.927905488" Feb 02 11:14:27 crc kubenswrapper[4838]: I0202 11:14:27.986821 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Feb 02 11:14:27 crc kubenswrapper[4838]: W0202 11:14:27.990694 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7cb40c61_e9a4_44fb_b15c_fa2ffb3b7406.slice/crio-2a41baa5bbd8e26dc93192d72012815923400c8cccd14876f3dc87e2ca568a73 WatchSource:0}: Error finding container 2a41baa5bbd8e26dc93192d72012815923400c8cccd14876f3dc87e2ca568a73: Status 404 returned error can't find the container with id 2a41baa5bbd8e26dc93192d72012815923400c8cccd14876f3dc87e2ca568a73 Feb 02 11:14:28 crc kubenswrapper[4838]: I0202 11:14:28.585533 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-db-sync-z8sb2" 
event={"ID":"04bf896a-e964-48a2-900e-44362394a6ac","Type":"ContainerStarted","Data":"12e95395a0c60b1f95b2e3d8453a9e78847600a049aa654dddca355aeb90889b"} Feb 02 11:14:28 crc kubenswrapper[4838]: I0202 11:14:28.589811 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406","Type":"ContainerStarted","Data":"2a41baa5bbd8e26dc93192d72012815923400c8cccd14876f3dc87e2ca568a73"} Feb 02 11:14:28 crc kubenswrapper[4838]: I0202 11:14:28.608851 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ironic-db-sync-z8sb2" podStartSLOduration=18.388226662 podStartE2EDuration="1m3.608826341s" podCreationTimestamp="2026-02-02 11:13:25 +0000 UTC" firstStartedPulling="2026-02-02 11:13:41.572390976 +0000 UTC m=+1215.909492004" lastFinishedPulling="2026-02-02 11:14:26.792990655 +0000 UTC m=+1261.130091683" observedRunningTime="2026-02-02 11:14:28.603650154 +0000 UTC m=+1262.940751192" watchObservedRunningTime="2026-02-02 11:14:28.608826341 +0000 UTC m=+1262.945927369" Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.401145 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.541530 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxmgv\" (UniqueName: \"kubernetes.io/projected/69335c29-7c9c-438d-ac8d-85141a4f9bb5-kube-api-access-pxmgv\") pod \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.541693 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69335c29-7c9c-438d-ac8d-85141a4f9bb5-combined-ca-bundle\") pod \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.541724 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69335c29-7c9c-438d-ac8d-85141a4f9bb5-config-data\") pod \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.541778 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/69335c29-7c9c-438d-ac8d-85141a4f9bb5-run-httpd\") pod \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.541830 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/69335c29-7c9c-438d-ac8d-85141a4f9bb5-sg-core-conf-yaml\") pod \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.541859 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/69335c29-7c9c-438d-ac8d-85141a4f9bb5-log-httpd\") pod \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.542178 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/69335c29-7c9c-438d-ac8d-85141a4f9bb5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "69335c29-7c9c-438d-ac8d-85141a4f9bb5" (UID: "69335c29-7c9c-438d-ac8d-85141a4f9bb5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.542719 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69335c29-7c9c-438d-ac8d-85141a4f9bb5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "69335c29-7c9c-438d-ac8d-85141a4f9bb5" (UID: "69335c29-7c9c-438d-ac8d-85141a4f9bb5"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.542801 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/69335c29-7c9c-438d-ac8d-85141a4f9bb5-scripts\") pod \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\" (UID: \"69335c29-7c9c-438d-ac8d-85141a4f9bb5\") " Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.543385 4838 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/69335c29-7c9c-438d-ac8d-85141a4f9bb5-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.543403 4838 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/69335c29-7c9c-438d-ac8d-85141a4f9bb5-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.550745 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69335c29-7c9c-438d-ac8d-85141a4f9bb5-scripts" (OuterVolumeSpecName: "scripts") pod "69335c29-7c9c-438d-ac8d-85141a4f9bb5" (UID: "69335c29-7c9c-438d-ac8d-85141a4f9bb5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.564763 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69335c29-7c9c-438d-ac8d-85141a4f9bb5-kube-api-access-pxmgv" (OuterVolumeSpecName: "kube-api-access-pxmgv") pod "69335c29-7c9c-438d-ac8d-85141a4f9bb5" (UID: "69335c29-7c9c-438d-ac8d-85141a4f9bb5"). InnerVolumeSpecName "kube-api-access-pxmgv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.569848 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69335c29-7c9c-438d-ac8d-85141a4f9bb5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "69335c29-7c9c-438d-ac8d-85141a4f9bb5" (UID: "69335c29-7c9c-438d-ac8d-85141a4f9bb5"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.607653 4838 generic.go:334] "Generic (PLEG): container finished" podID="69335c29-7c9c-438d-ac8d-85141a4f9bb5" containerID="a84e0ce2d735e4167f4606dd17d211b1dbb2be9fa7f998fc51fe43fc67059f1a" exitCode=0 Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.607694 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"69335c29-7c9c-438d-ac8d-85141a4f9bb5","Type":"ContainerDied","Data":"a84e0ce2d735e4167f4606dd17d211b1dbb2be9fa7f998fc51fe43fc67059f1a"} Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.607720 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"69335c29-7c9c-438d-ac8d-85141a4f9bb5","Type":"ContainerDied","Data":"bcbfe5646f1ec57fe89c0d9b1cb903ee218b20e43e295f0014c611d7df243b3c"} Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.607737 4838 scope.go:117] "RemoveContainer" containerID="ddbe10a98f03864716e5f3d91d0bdf6c60bcf3d9adcb70a1a6594985f9928ef4" Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.607856 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.620434 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69335c29-7c9c-438d-ac8d-85141a4f9bb5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "69335c29-7c9c-438d-ac8d-85141a4f9bb5" (UID: "69335c29-7c9c-438d-ac8d-85141a4f9bb5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.625911 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69335c29-7c9c-438d-ac8d-85141a4f9bb5-config-data" (OuterVolumeSpecName: "config-data") pod "69335c29-7c9c-438d-ac8d-85141a4f9bb5" (UID: "69335c29-7c9c-438d-ac8d-85141a4f9bb5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.645536 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69335c29-7c9c-438d-ac8d-85141a4f9bb5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.645567 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69335c29-7c9c-438d-ac8d-85141a4f9bb5-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.645576 4838 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/69335c29-7c9c-438d-ac8d-85141a4f9bb5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.645585 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/69335c29-7c9c-438d-ac8d-85141a4f9bb5-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.645594 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxmgv\" (UniqueName: \"kubernetes.io/projected/69335c29-7c9c-438d-ac8d-85141a4f9bb5-kube-api-access-pxmgv\") on node \"crc\" DevicePath \"\"" Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.682010 4838 scope.go:117] "RemoveContainer" containerID="a2e6f8c7370409f7916d870010101c6f8924dea0dad807f3023768f11013ebba" Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.700064 4838 scope.go:117] "RemoveContainer" containerID="a84e0ce2d735e4167f4606dd17d211b1dbb2be9fa7f998fc51fe43fc67059f1a" Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.723329 4838 scope.go:117] "RemoveContainer" containerID="ddbe10a98f03864716e5f3d91d0bdf6c60bcf3d9adcb70a1a6594985f9928ef4" Feb 02 11:14:29 crc kubenswrapper[4838]: E0202 11:14:29.723828 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ddbe10a98f03864716e5f3d91d0bdf6c60bcf3d9adcb70a1a6594985f9928ef4\": container with ID starting with ddbe10a98f03864716e5f3d91d0bdf6c60bcf3d9adcb70a1a6594985f9928ef4 not found: ID does not exist" containerID="ddbe10a98f03864716e5f3d91d0bdf6c60bcf3d9adcb70a1a6594985f9928ef4" Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.723863 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ddbe10a98f03864716e5f3d91d0bdf6c60bcf3d9adcb70a1a6594985f9928ef4"} err="failed to get container status \"ddbe10a98f03864716e5f3d91d0bdf6c60bcf3d9adcb70a1a6594985f9928ef4\": rpc error: code = NotFound desc = could not find container \"ddbe10a98f03864716e5f3d91d0bdf6c60bcf3d9adcb70a1a6594985f9928ef4\": container with ID starting with ddbe10a98f03864716e5f3d91d0bdf6c60bcf3d9adcb70a1a6594985f9928ef4 not found: ID does not exist" Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.723884 4838 scope.go:117] "RemoveContainer" containerID="a2e6f8c7370409f7916d870010101c6f8924dea0dad807f3023768f11013ebba" Feb 02 11:14:29 crc kubenswrapper[4838]: E0202 11:14:29.724107 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2e6f8c7370409f7916d870010101c6f8924dea0dad807f3023768f11013ebba\": container with ID starting with a2e6f8c7370409f7916d870010101c6f8924dea0dad807f3023768f11013ebba not found: ID does not exist" 
containerID="a2e6f8c7370409f7916d870010101c6f8924dea0dad807f3023768f11013ebba" Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.724127 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2e6f8c7370409f7916d870010101c6f8924dea0dad807f3023768f11013ebba"} err="failed to get container status \"a2e6f8c7370409f7916d870010101c6f8924dea0dad807f3023768f11013ebba\": rpc error: code = NotFound desc = could not find container \"a2e6f8c7370409f7916d870010101c6f8924dea0dad807f3023768f11013ebba\": container with ID starting with a2e6f8c7370409f7916d870010101c6f8924dea0dad807f3023768f11013ebba not found: ID does not exist" Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.724140 4838 scope.go:117] "RemoveContainer" containerID="a84e0ce2d735e4167f4606dd17d211b1dbb2be9fa7f998fc51fe43fc67059f1a" Feb 02 11:14:29 crc kubenswrapper[4838]: E0202 11:14:29.724464 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a84e0ce2d735e4167f4606dd17d211b1dbb2be9fa7f998fc51fe43fc67059f1a\": container with ID starting with a84e0ce2d735e4167f4606dd17d211b1dbb2be9fa7f998fc51fe43fc67059f1a not found: ID does not exist" containerID="a84e0ce2d735e4167f4606dd17d211b1dbb2be9fa7f998fc51fe43fc67059f1a" Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.724485 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a84e0ce2d735e4167f4606dd17d211b1dbb2be9fa7f998fc51fe43fc67059f1a"} err="failed to get container status \"a84e0ce2d735e4167f4606dd17d211b1dbb2be9fa7f998fc51fe43fc67059f1a\": rpc error: code = NotFound desc = could not find container \"a84e0ce2d735e4167f4606dd17d211b1dbb2be9fa7f998fc51fe43fc67059f1a\": container with ID starting with a84e0ce2d735e4167f4606dd17d211b1dbb2be9fa7f998fc51fe43fc67059f1a not found: ID does not exist" Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.963803 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:14:29 crc kubenswrapper[4838]: I0202 11:14:29.970249 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.032315 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:14:30 crc kubenswrapper[4838]: E0202 11:14:30.032701 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69335c29-7c9c-438d-ac8d-85141a4f9bb5" containerName="sg-core" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.032717 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="69335c29-7c9c-438d-ac8d-85141a4f9bb5" containerName="sg-core" Feb 02 11:14:30 crc kubenswrapper[4838]: E0202 11:14:30.032740 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69335c29-7c9c-438d-ac8d-85141a4f9bb5" containerName="ceilometer-notification-agent" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.032748 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="69335c29-7c9c-438d-ac8d-85141a4f9bb5" containerName="ceilometer-notification-agent" Feb 02 11:14:30 crc kubenswrapper[4838]: E0202 11:14:30.032761 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69335c29-7c9c-438d-ac8d-85141a4f9bb5" containerName="proxy-httpd" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.032767 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="69335c29-7c9c-438d-ac8d-85141a4f9bb5" containerName="proxy-httpd" Feb 02 
11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.032920 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="69335c29-7c9c-438d-ac8d-85141a4f9bb5" containerName="ceilometer-notification-agent" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.032940 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="69335c29-7c9c-438d-ac8d-85141a4f9bb5" containerName="sg-core" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.032949 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="69335c29-7c9c-438d-ac8d-85141a4f9bb5" containerName="proxy-httpd" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.034340 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.040049 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.040199 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.058317 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.164955 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddd3c545-36ba-4568-9f0b-621669c69e2a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") " pod="openstack/ceilometer-0" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.165006 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ddd3c545-36ba-4568-9f0b-621669c69e2a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") " pod="openstack/ceilometer-0" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.165040 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ddd3c545-36ba-4568-9f0b-621669c69e2a-run-httpd\") pod \"ceilometer-0\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") " pod="openstack/ceilometer-0" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.165107 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ddd3c545-36ba-4568-9f0b-621669c69e2a-scripts\") pod \"ceilometer-0\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") " pod="openstack/ceilometer-0" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.165136 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ddd3c545-36ba-4568-9f0b-621669c69e2a-log-httpd\") pod \"ceilometer-0\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") " pod="openstack/ceilometer-0" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.165152 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nbs6\" (UniqueName: \"kubernetes.io/projected/ddd3c545-36ba-4568-9f0b-621669c69e2a-kube-api-access-6nbs6\") pod \"ceilometer-0\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") " pod="openstack/ceilometer-0" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.165173 
4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ddd3c545-36ba-4568-9f0b-621669c69e2a-config-data\") pod \"ceilometer-0\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") " pod="openstack/ceilometer-0" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.268690 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ddd3c545-36ba-4568-9f0b-621669c69e2a-scripts\") pod \"ceilometer-0\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") " pod="openstack/ceilometer-0" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.268776 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ddd3c545-36ba-4568-9f0b-621669c69e2a-log-httpd\") pod \"ceilometer-0\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") " pod="openstack/ceilometer-0" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.268804 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nbs6\" (UniqueName: \"kubernetes.io/projected/ddd3c545-36ba-4568-9f0b-621669c69e2a-kube-api-access-6nbs6\") pod \"ceilometer-0\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") " pod="openstack/ceilometer-0" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.268838 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ddd3c545-36ba-4568-9f0b-621669c69e2a-config-data\") pod \"ceilometer-0\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") " pod="openstack/ceilometer-0" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.269702 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddd3c545-36ba-4568-9f0b-621669c69e2a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") " pod="openstack/ceilometer-0" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.270056 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ddd3c545-36ba-4568-9f0b-621669c69e2a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") " pod="openstack/ceilometer-0" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.270114 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ddd3c545-36ba-4568-9f0b-621669c69e2a-run-httpd\") pod \"ceilometer-0\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") " pod="openstack/ceilometer-0" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.270589 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ddd3c545-36ba-4568-9f0b-621669c69e2a-log-httpd\") pod \"ceilometer-0\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") " pod="openstack/ceilometer-0" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.271864 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ddd3c545-36ba-4568-9f0b-621669c69e2a-run-httpd\") pod \"ceilometer-0\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") " pod="openstack/ceilometer-0" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.274319 4838 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ddd3c545-36ba-4568-9f0b-621669c69e2a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") " pod="openstack/ceilometer-0" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.275261 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ddd3c545-36ba-4568-9f0b-621669c69e2a-scripts\") pod \"ceilometer-0\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") " pod="openstack/ceilometer-0" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.276222 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ddd3c545-36ba-4568-9f0b-621669c69e2a-config-data\") pod \"ceilometer-0\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") " pod="openstack/ceilometer-0" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.276840 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddd3c545-36ba-4568-9f0b-621669c69e2a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") " pod="openstack/ceilometer-0" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.286444 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nbs6\" (UniqueName: \"kubernetes.io/projected/ddd3c545-36ba-4568-9f0b-621669c69e2a-kube-api-access-6nbs6\") pod \"ceilometer-0\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") " pod="openstack/ceilometer-0" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.359604 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.519305 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69335c29-7c9c-438d-ac8d-85141a4f9bb5" path="/var/lib/kubelet/pods/69335c29-7c9c-438d-ac8d-85141a4f9bb5/volumes" Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.801263 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:14:30 crc kubenswrapper[4838]: W0202 11:14:30.810122 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podddd3c545_36ba_4568_9f0b_621669c69e2a.slice/crio-a170a95e892f52ade3eff686fccb1c2d0dfbee84a6e2c528ea902136c42e007c WatchSource:0}: Error finding container a170a95e892f52ade3eff686fccb1c2d0dfbee84a6e2c528ea902136c42e007c: Status 404 returned error can't find the container with id a170a95e892f52ade3eff686fccb1c2d0dfbee84a6e2c528ea902136c42e007c Feb 02 11:14:30 crc kubenswrapper[4838]: I0202 11:14:30.814059 4838 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 11:14:31 crc kubenswrapper[4838]: I0202 11:14:31.632691 4838 generic.go:334] "Generic (PLEG): container finished" podID="3c1f48f2-93aa-4b92-a289-7869c1993629" containerID="c855af0fa2c08c2144c31666a641e6cbb7ea66182beb1e800da4f1f4d8f51032" exitCode=0 Feb 02 11:14:31 crc kubenswrapper[4838]: I0202 11:14:31.632798 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-swztr" event={"ID":"3c1f48f2-93aa-4b92-a289-7869c1993629","Type":"ContainerDied","Data":"c855af0fa2c08c2144c31666a641e6cbb7ea66182beb1e800da4f1f4d8f51032"} Feb 02 11:14:31 crc kubenswrapper[4838]: I0202 11:14:31.634695 4838 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ddd3c545-36ba-4568-9f0b-621669c69e2a","Type":"ContainerStarted","Data":"a170a95e892f52ade3eff686fccb1c2d0dfbee84a6e2c528ea902136c42e007c"} Feb 02 11:14:33 crc kubenswrapper[4838]: I0202 11:14:33.808251 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 02 11:14:33 crc kubenswrapper[4838]: I0202 11:14:33.808861 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 02 11:14:33 crc kubenswrapper[4838]: I0202 11:14:33.858844 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 02 11:14:33 crc kubenswrapper[4838]: I0202 11:14:33.861949 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 02 11:14:34 crc kubenswrapper[4838]: I0202 11:14:34.658262 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 02 11:14:34 crc kubenswrapper[4838]: I0202 11:14:34.658592 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 02 11:14:34 crc kubenswrapper[4838]: I0202 11:14:34.836061 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 02 11:14:34 crc kubenswrapper[4838]: I0202 11:14:34.838038 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 02 11:14:34 crc kubenswrapper[4838]: I0202 11:14:34.886520 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 02 11:14:34 crc kubenswrapper[4838]: I0202 11:14:34.888492 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 02 11:14:35 crc kubenswrapper[4838]: I0202 11:14:35.668072 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 02 11:14:35 crc kubenswrapper[4838]: I0202 11:14:35.668298 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 02 11:14:37 crc kubenswrapper[4838]: I0202 11:14:37.072248 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 02 11:14:37 crc kubenswrapper[4838]: I0202 11:14:37.072593 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 02 11:14:37 crc kubenswrapper[4838]: I0202 11:14:37.725963 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 02 11:14:37 crc kubenswrapper[4838]: I0202 11:14:37.726062 4838 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 11:14:38 crc kubenswrapper[4838]: I0202 11:14:38.552553 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 02 11:14:43 crc kubenswrapper[4838]: I0202 11:14:43.563280 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-swztr"
Feb 02 11:14:43 crc kubenswrapper[4838]: I0202 11:14:43.621848 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c1f48f2-93aa-4b92-a289-7869c1993629-config-data\") pod \"3c1f48f2-93aa-4b92-a289-7869c1993629\" (UID: \"3c1f48f2-93aa-4b92-a289-7869c1993629\") "
Feb 02 11:14:43 crc kubenswrapper[4838]: I0202 11:14:43.621894 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c1f48f2-93aa-4b92-a289-7869c1993629-logs\") pod \"3c1f48f2-93aa-4b92-a289-7869c1993629\" (UID: \"3c1f48f2-93aa-4b92-a289-7869c1993629\") "
Feb 02 11:14:43 crc kubenswrapper[4838]: I0202 11:14:43.621921 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c1f48f2-93aa-4b92-a289-7869c1993629-combined-ca-bundle\") pod \"3c1f48f2-93aa-4b92-a289-7869c1993629\" (UID: \"3c1f48f2-93aa-4b92-a289-7869c1993629\") "
Feb 02 11:14:43 crc kubenswrapper[4838]: I0202 11:14:43.623724 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c1f48f2-93aa-4b92-a289-7869c1993629-logs" (OuterVolumeSpecName: "logs") pod "3c1f48f2-93aa-4b92-a289-7869c1993629" (UID: "3c1f48f2-93aa-4b92-a289-7869c1993629"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:14:43 crc kubenswrapper[4838]: I0202 11:14:43.647898 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c1f48f2-93aa-4b92-a289-7869c1993629-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3c1f48f2-93aa-4b92-a289-7869c1993629" (UID: "3c1f48f2-93aa-4b92-a289-7869c1993629"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:14:43 crc kubenswrapper[4838]: I0202 11:14:43.652299 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c1f48f2-93aa-4b92-a289-7869c1993629-config-data" (OuterVolumeSpecName: "config-data") pod "3c1f48f2-93aa-4b92-a289-7869c1993629" (UID: "3c1f48f2-93aa-4b92-a289-7869c1993629"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:14:43 crc kubenswrapper[4838]: I0202 11:14:43.723247 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8pwx\" (UniqueName: \"kubernetes.io/projected/3c1f48f2-93aa-4b92-a289-7869c1993629-kube-api-access-x8pwx\") pod \"3c1f48f2-93aa-4b92-a289-7869c1993629\" (UID: \"3c1f48f2-93aa-4b92-a289-7869c1993629\") "
Feb 02 11:14:43 crc kubenswrapper[4838]: I0202 11:14:43.723307 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3c1f48f2-93aa-4b92-a289-7869c1993629-scripts\") pod \"3c1f48f2-93aa-4b92-a289-7869c1993629\" (UID: \"3c1f48f2-93aa-4b92-a289-7869c1993629\") "
Feb 02 11:14:43 crc kubenswrapper[4838]: I0202 11:14:43.723554 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c1f48f2-93aa-4b92-a289-7869c1993629-config-data\") on node \"crc\" DevicePath \"\""
Feb 02 11:14:43 crc kubenswrapper[4838]: I0202 11:14:43.723566 4838 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c1f48f2-93aa-4b92-a289-7869c1993629-logs\") on node \"crc\" DevicePath \"\""
Feb 02 11:14:43 crc kubenswrapper[4838]: I0202 11:14:43.723574 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c1f48f2-93aa-4b92-a289-7869c1993629-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 11:14:43 crc kubenswrapper[4838]: I0202 11:14:43.726653 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c1f48f2-93aa-4b92-a289-7869c1993629-kube-api-access-x8pwx" (OuterVolumeSpecName: "kube-api-access-x8pwx") pod "3c1f48f2-93aa-4b92-a289-7869c1993629" (UID: "3c1f48f2-93aa-4b92-a289-7869c1993629"). InnerVolumeSpecName "kube-api-access-x8pwx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:14:43 crc kubenswrapper[4838]: I0202 11:14:43.726988 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c1f48f2-93aa-4b92-a289-7869c1993629-scripts" (OuterVolumeSpecName: "scripts") pod "3c1f48f2-93aa-4b92-a289-7869c1993629" (UID: "3c1f48f2-93aa-4b92-a289-7869c1993629"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:14:43 crc kubenswrapper[4838]: I0202 11:14:43.735292 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-swztr" event={"ID":"3c1f48f2-93aa-4b92-a289-7869c1993629","Type":"ContainerDied","Data":"cd61b098d27d82d5d0c274a15237eb3468017f96ebcca6faabe6a4f08ad3e755"}
Feb 02 11:14:43 crc kubenswrapper[4838]: I0202 11:14:43.735342 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cd61b098d27d82d5d0c274a15237eb3468017f96ebcca6faabe6a4f08ad3e755"
Feb 02 11:14:43 crc kubenswrapper[4838]: I0202 11:14:43.735407 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-swztr"
Feb 02 11:14:43 crc kubenswrapper[4838]: I0202 11:14:43.824976 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8pwx\" (UniqueName: \"kubernetes.io/projected/3c1f48f2-93aa-4b92-a289-7869c1993629-kube-api-access-x8pwx\") on node \"crc\" DevicePath \"\""
Feb 02 11:14:43 crc kubenswrapper[4838]: I0202 11:14:43.825017 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3c1f48f2-93aa-4b92-a289-7869c1993629-scripts\") on node \"crc\" DevicePath \"\""
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.673530 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-b4ffff5fb-46ldv"]
Feb 02 11:14:44 crc kubenswrapper[4838]: E0202 11:14:44.674085 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c1f48f2-93aa-4b92-a289-7869c1993629" containerName="placement-db-sync"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.674104 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c1f48f2-93aa-4b92-a289-7869c1993629" containerName="placement-db-sync"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.674338 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c1f48f2-93aa-4b92-a289-7869c1993629" containerName="placement-db-sync"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.679694 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.682581 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.683424 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.683483 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.683500 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-rdxct"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.687103 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.698515 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-b4ffff5fb-46ldv"]
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.839837 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32a23a44-9dae-46da-9895-dcd512447d9c-internal-tls-certs\") pod \"placement-b4ffff5fb-46ldv\" (UID: \"32a23a44-9dae-46da-9895-dcd512447d9c\") " pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.839905 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32a23a44-9dae-46da-9895-dcd512447d9c-logs\") pod \"placement-b4ffff5fb-46ldv\" (UID: \"32a23a44-9dae-46da-9895-dcd512447d9c\") " pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.839938 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjrhc\" (UniqueName: \"kubernetes.io/projected/32a23a44-9dae-46da-9895-dcd512447d9c-kube-api-access-jjrhc\") pod \"placement-b4ffff5fb-46ldv\" (UID: \"32a23a44-9dae-46da-9895-dcd512447d9c\") " pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.839955 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32a23a44-9dae-46da-9895-dcd512447d9c-config-data\") pod \"placement-b4ffff5fb-46ldv\" (UID: \"32a23a44-9dae-46da-9895-dcd512447d9c\") " pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.840004 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32a23a44-9dae-46da-9895-dcd512447d9c-scripts\") pod \"placement-b4ffff5fb-46ldv\" (UID: \"32a23a44-9dae-46da-9895-dcd512447d9c\") " pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.840024 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32a23a44-9dae-46da-9895-dcd512447d9c-combined-ca-bundle\") pod \"placement-b4ffff5fb-46ldv\" (UID: \"32a23a44-9dae-46da-9895-dcd512447d9c\") " pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.840062 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32a23a44-9dae-46da-9895-dcd512447d9c-public-tls-certs\") pod \"placement-b4ffff5fb-46ldv\" (UID: \"32a23a44-9dae-46da-9895-dcd512447d9c\") " pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.941984 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32a23a44-9dae-46da-9895-dcd512447d9c-scripts\") pod \"placement-b4ffff5fb-46ldv\" (UID: \"32a23a44-9dae-46da-9895-dcd512447d9c\") " pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.942048 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32a23a44-9dae-46da-9895-dcd512447d9c-combined-ca-bundle\") pod \"placement-b4ffff5fb-46ldv\" (UID: \"32a23a44-9dae-46da-9895-dcd512447d9c\") " pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.942105 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32a23a44-9dae-46da-9895-dcd512447d9c-public-tls-certs\") pod \"placement-b4ffff5fb-46ldv\" (UID: \"32a23a44-9dae-46da-9895-dcd512447d9c\") " pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.942157 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32a23a44-9dae-46da-9895-dcd512447d9c-internal-tls-certs\") pod \"placement-b4ffff5fb-46ldv\" (UID: \"32a23a44-9dae-46da-9895-dcd512447d9c\") " pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.942201 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32a23a44-9dae-46da-9895-dcd512447d9c-logs\") pod \"placement-b4ffff5fb-46ldv\" (UID: \"32a23a44-9dae-46da-9895-dcd512447d9c\") " pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.942260 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjrhc\" (UniqueName: \"kubernetes.io/projected/32a23a44-9dae-46da-9895-dcd512447d9c-kube-api-access-jjrhc\") pod \"placement-b4ffff5fb-46ldv\" (UID: \"32a23a44-9dae-46da-9895-dcd512447d9c\") " pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.942280 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32a23a44-9dae-46da-9895-dcd512447d9c-config-data\") pod \"placement-b4ffff5fb-46ldv\" (UID: \"32a23a44-9dae-46da-9895-dcd512447d9c\") " pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.955529 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32a23a44-9dae-46da-9895-dcd512447d9c-scripts\") pod \"placement-b4ffff5fb-46ldv\" (UID: \"32a23a44-9dae-46da-9895-dcd512447d9c\") " pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.958581 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32a23a44-9dae-46da-9895-dcd512447d9c-combined-ca-bundle\") pod \"placement-b4ffff5fb-46ldv\" (UID: \"32a23a44-9dae-46da-9895-dcd512447d9c\") " pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.960048 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32a23a44-9dae-46da-9895-dcd512447d9c-logs\") pod \"placement-b4ffff5fb-46ldv\" (UID: \"32a23a44-9dae-46da-9895-dcd512447d9c\") " pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.961584 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32a23a44-9dae-46da-9895-dcd512447d9c-config-data\") pod \"placement-b4ffff5fb-46ldv\" (UID: \"32a23a44-9dae-46da-9895-dcd512447d9c\") " pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.965202 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32a23a44-9dae-46da-9895-dcd512447d9c-internal-tls-certs\") pod \"placement-b4ffff5fb-46ldv\" (UID: \"32a23a44-9dae-46da-9895-dcd512447d9c\") " pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.974275 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32a23a44-9dae-46da-9895-dcd512447d9c-public-tls-certs\") pod \"placement-b4ffff5fb-46ldv\" (UID: \"32a23a44-9dae-46da-9895-dcd512447d9c\") " pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:14:44 crc kubenswrapper[4838]: I0202 11:14:44.996411 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjrhc\" (UniqueName: \"kubernetes.io/projected/32a23a44-9dae-46da-9895-dcd512447d9c-kube-api-access-jjrhc\") pod \"placement-b4ffff5fb-46ldv\" (UID: \"32a23a44-9dae-46da-9895-dcd512447d9c\") " pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:14:45 crc kubenswrapper[4838]: I0202 11:14:45.057491 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:14:45 crc kubenswrapper[4838]: W0202 11:14:45.767361 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32a23a44_9dae_46da_9895_dcd512447d9c.slice/crio-ac483ddfd86ece64b128b65f64ccc28dac6b96c689dfdf5d2d0ff23553200bf7 WatchSource:0}: Error finding container ac483ddfd86ece64b128b65f64ccc28dac6b96c689dfdf5d2d0ff23553200bf7: Status 404 returned error can't find the container with id ac483ddfd86ece64b128b65f64ccc28dac6b96c689dfdf5d2d0ff23553200bf7
Feb 02 11:14:45 crc kubenswrapper[4838]: I0202 11:14:45.775847 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-b4ffff5fb-46ldv"]
Feb 02 11:14:46 crc kubenswrapper[4838]: I0202 11:14:46.775256 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b4ffff5fb-46ldv" event={"ID":"32a23a44-9dae-46da-9895-dcd512447d9c","Type":"ContainerStarted","Data":"ac483ddfd86ece64b128b65f64ccc28dac6b96c689dfdf5d2d0ff23553200bf7"}
Feb 02 11:14:47 crc kubenswrapper[4838]: I0202 11:14:47.788946 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ddd3c545-36ba-4568-9f0b-621669c69e2a","Type":"ContainerStarted","Data":"318e660dfb9201c80432337a904a8ccfcc05737d11a03ebc12ae6a1a04174aeb"}
Feb 02 11:14:49 crc kubenswrapper[4838]: I0202 11:14:49.188723 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="metallb-system/frr-k8s-ltddp" podUID="b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6" containerName="frr" probeResult="failure" output="Get \"http://127.0.0.1:7573/livez\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Feb 02 11:14:50 crc kubenswrapper[4838]: I0202 11:14:50.811789 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b4ffff5fb-46ldv" event={"ID":"32a23a44-9dae-46da-9895-dcd512447d9c","Type":"ContainerStarted","Data":"4bed3264efcdfb384bf817c4830d17ff6eba9e0577b03067821e785f82bc2aac"}
Feb 02 11:14:52 crc kubenswrapper[4838]: I0202 11:14:52.850698 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b4ffff5fb-46ldv" event={"ID":"32a23a44-9dae-46da-9895-dcd512447d9c","Type":"ContainerStarted","Data":"a6873ef37f6a0d56b92f3fbcf4871a42731d8bc98d1212781cc7bf44faf4378f"}
Feb 02 11:14:53 crc kubenswrapper[4838]: I0202 11:14:53.858121 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:14:53 crc kubenswrapper[4838]: I0202 11:14:53.858491 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:14:53 crc kubenswrapper[4838]: I0202 11:14:53.888437 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-b4ffff5fb-46ldv" podStartSLOduration=9.888414247 podStartE2EDuration="9.888414247s" podCreationTimestamp="2026-02-02 11:14:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:14:53.878726091 +0000 UTC m=+1288.215827149" watchObservedRunningTime="2026-02-02 11:14:53.888414247 +0000 UTC m=+1288.225515275"
Feb 02 11:14:58 crc kubenswrapper[4838]: E0202 11:14:58.357348 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified"
Feb 02 11:14:58 crc kubenswrapper[4838]: E0202 11:14:58.358019 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:openstackclient,Image:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,Command:[/bin/sleep],Args:[infinity],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n55bh646h5d7hbbh88h688h6fh65dh56dh5c7h695h9dh5fdh547h66bh66bh696h65fhc4h6h596h586h56bhf9h6fh5d7hdfh8hc8h678h5d7h668q,ValueFrom:nil,},EnvVar{Name:OS_CLOUD,Value:default,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_HOST,Value:metric-storage-prometheus.openstack.svc,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_PORT,Value:9090,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openstack-config,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/cloudrc,SubPath:cloudrc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9m22z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42401,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:*42401,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstackclient_openstack(7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Feb 02 11:14:58 crc kubenswrapper[4838]: E0202 11:14:58.359306 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstackclient" podUID="7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406"
Feb 02 11:15:00 crc kubenswrapper[4838]: I0202 11:15:00.133138 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500515-zpqx2"]
Feb 02 11:15:00 crc kubenswrapper[4838]: I0202 11:15:00.134330 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500515-zpqx2"
Feb 02 11:15:00 crc kubenswrapper[4838]: I0202 11:15:00.136584 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Feb 02 11:15:00 crc kubenswrapper[4838]: I0202 11:15:00.136861 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Feb 02 11:15:00 crc kubenswrapper[4838]: I0202 11:15:00.146738 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500515-zpqx2"]
Feb 02 11:15:00 crc kubenswrapper[4838]: I0202 11:15:00.254939 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6f4a7ccd-2447-42ca-8ef8-8a7823cdf679-config-volume\") pod \"collect-profiles-29500515-zpqx2\" (UID: \"6f4a7ccd-2447-42ca-8ef8-8a7823cdf679\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500515-zpqx2"
Feb 02 11:15:00 crc kubenswrapper[4838]: I0202 11:15:00.255289 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6f4a7ccd-2447-42ca-8ef8-8a7823cdf679-secret-volume\") pod \"collect-profiles-29500515-zpqx2\" (UID: \"6f4a7ccd-2447-42ca-8ef8-8a7823cdf679\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500515-zpqx2"
Feb 02 11:15:00 crc kubenswrapper[4838]: I0202 11:15:00.255365 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lccbk\" (UniqueName: \"kubernetes.io/projected/6f4a7ccd-2447-42ca-8ef8-8a7823cdf679-kube-api-access-lccbk\") pod \"collect-profiles-29500515-zpqx2\" (UID: \"6f4a7ccd-2447-42ca-8ef8-8a7823cdf679\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500515-zpqx2"
Feb 02 11:15:00 crc kubenswrapper[4838]: I0202 11:15:00.357284 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lccbk\" (UniqueName: \"kubernetes.io/projected/6f4a7ccd-2447-42ca-8ef8-8a7823cdf679-kube-api-access-lccbk\") pod \"collect-profiles-29500515-zpqx2\" (UID: \"6f4a7ccd-2447-42ca-8ef8-8a7823cdf679\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500515-zpqx2"
Feb 02 11:15:00 crc kubenswrapper[4838]: I0202 11:15:00.357388 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6f4a7ccd-2447-42ca-8ef8-8a7823cdf679-config-volume\") pod \"collect-profiles-29500515-zpqx2\" (UID: \"6f4a7ccd-2447-42ca-8ef8-8a7823cdf679\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500515-zpqx2"
Feb 02 11:15:00 crc kubenswrapper[4838]: I0202 11:15:00.357467 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6f4a7ccd-2447-42ca-8ef8-8a7823cdf679-secret-volume\") pod \"collect-profiles-29500515-zpqx2\" (UID: \"6f4a7ccd-2447-42ca-8ef8-8a7823cdf679\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500515-zpqx2"
Feb 02 11:15:00 crc kubenswrapper[4838]: I0202 11:15:00.358950 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6f4a7ccd-2447-42ca-8ef8-8a7823cdf679-config-volume\") pod \"collect-profiles-29500515-zpqx2\" (UID: \"6f4a7ccd-2447-42ca-8ef8-8a7823cdf679\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500515-zpqx2"
Feb 02 11:15:00 crc kubenswrapper[4838]: I0202 11:15:00.363971 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6f4a7ccd-2447-42ca-8ef8-8a7823cdf679-secret-volume\") pod \"collect-profiles-29500515-zpqx2\" (UID: \"6f4a7ccd-2447-42ca-8ef8-8a7823cdf679\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500515-zpqx2"
Feb 02 11:15:00 crc kubenswrapper[4838]: I0202 11:15:00.381509 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lccbk\" (UniqueName: \"kubernetes.io/projected/6f4a7ccd-2447-42ca-8ef8-8a7823cdf679-kube-api-access-lccbk\") pod \"collect-profiles-29500515-zpqx2\" (UID: \"6f4a7ccd-2447-42ca-8ef8-8a7823cdf679\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500515-zpqx2"
Feb 02 11:15:00 crc kubenswrapper[4838]: I0202 11:15:00.460265 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500515-zpqx2"
Feb 02 11:15:03 crc kubenswrapper[4838]: E0202 11:15:03.955760 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified\\\"\"" pod="openstack/openstackclient" podUID="7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406"
Feb 02 11:15:05 crc kubenswrapper[4838]: I0202 11:15:05.140252 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500515-zpqx2"]
Feb 02 11:15:05 crc kubenswrapper[4838]: I0202 11:15:05.976797 4838 generic.go:334] "Generic (PLEG): container finished" podID="6f4a7ccd-2447-42ca-8ef8-8a7823cdf679" containerID="8173de82fc4d87e160fb7b611534e9fa45d40d653dfe81c4b7cde861aa75875b" exitCode=0
Feb 02 11:15:05 crc kubenswrapper[4838]: I0202 11:15:05.976879 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500515-zpqx2" event={"ID":"6f4a7ccd-2447-42ca-8ef8-8a7823cdf679","Type":"ContainerDied","Data":"8173de82fc4d87e160fb7b611534e9fa45d40d653dfe81c4b7cde861aa75875b"}
Feb 02 11:15:05 crc kubenswrapper[4838]: I0202 11:15:05.976910 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500515-zpqx2" event={"ID":"6f4a7ccd-2447-42ca-8ef8-8a7823cdf679","Type":"ContainerStarted","Data":"5f3a80b219ebe999ef2ca994822789e1fe272cfb6f386b62efe18b27470b3038"}
Feb 02 11:15:05 crc kubenswrapper[4838]: I0202 11:15:05.978459 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ddd3c545-36ba-4568-9f0b-621669c69e2a","Type":"ContainerStarted","Data":"c0d82f2c7dfc1485a2d9d698b08cf15752144c1a21cca839b6cc77c6942d124e"}
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.313568 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500515-zpqx2"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.403136 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-655dd9ff5-m4vn6"]
Feb 02 11:15:07 crc kubenswrapper[4838]: E0202 11:15:07.403607 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f4a7ccd-2447-42ca-8ef8-8a7823cdf679" containerName="collect-profiles"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.403648 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f4a7ccd-2447-42ca-8ef8-8a7823cdf679" containerName="collect-profiles"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.403853 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f4a7ccd-2447-42ca-8ef8-8a7823cdf679" containerName="collect-profiles"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.405134 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.411571 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.411874 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.412050 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.420130 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-655dd9ff5-m4vn6"]
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.483108 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lccbk\" (UniqueName: \"kubernetes.io/projected/6f4a7ccd-2447-42ca-8ef8-8a7823cdf679-kube-api-access-lccbk\") pod \"6f4a7ccd-2447-42ca-8ef8-8a7823cdf679\" (UID: \"6f4a7ccd-2447-42ca-8ef8-8a7823cdf679\") "
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.483174 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6f4a7ccd-2447-42ca-8ef8-8a7823cdf679-secret-volume\") pod \"6f4a7ccd-2447-42ca-8ef8-8a7823cdf679\" (UID: \"6f4a7ccd-2447-42ca-8ef8-8a7823cdf679\") "
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.483369 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6f4a7ccd-2447-42ca-8ef8-8a7823cdf679-config-volume\") pod \"6f4a7ccd-2447-42ca-8ef8-8a7823cdf679\" (UID: \"6f4a7ccd-2447-42ca-8ef8-8a7823cdf679\") "
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.484330 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f4a7ccd-2447-42ca-8ef8-8a7823cdf679-config-volume" (OuterVolumeSpecName: "config-volume") pod "6f4a7ccd-2447-42ca-8ef8-8a7823cdf679" (UID: "6f4a7ccd-2447-42ca-8ef8-8a7823cdf679"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.493878 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f4a7ccd-2447-42ca-8ef8-8a7823cdf679-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6f4a7ccd-2447-42ca-8ef8-8a7823cdf679" (UID: "6f4a7ccd-2447-42ca-8ef8-8a7823cdf679"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.494570 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f4a7ccd-2447-42ca-8ef8-8a7823cdf679-kube-api-access-lccbk" (OuterVolumeSpecName: "kube-api-access-lccbk") pod "6f4a7ccd-2447-42ca-8ef8-8a7823cdf679" (UID: "6f4a7ccd-2447-42ca-8ef8-8a7823cdf679"). InnerVolumeSpecName "kube-api-access-lccbk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.586099 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5af71cb-2380-4977-9a44-ece13d4ce18a-public-tls-certs\") pod \"swift-proxy-655dd9ff5-m4vn6\" (UID: \"f5af71cb-2380-4977-9a44-ece13d4ce18a\") " pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.586150 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5af71cb-2380-4977-9a44-ece13d4ce18a-config-data\") pod \"swift-proxy-655dd9ff5-m4vn6\" (UID: \"f5af71cb-2380-4977-9a44-ece13d4ce18a\") " pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.586178 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5af71cb-2380-4977-9a44-ece13d4ce18a-combined-ca-bundle\") pod \"swift-proxy-655dd9ff5-m4vn6\" (UID: \"f5af71cb-2380-4977-9a44-ece13d4ce18a\") " pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.586443 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f5af71cb-2380-4977-9a44-ece13d4ce18a-etc-swift\") pod \"swift-proxy-655dd9ff5-m4vn6\" (UID: \"f5af71cb-2380-4977-9a44-ece13d4ce18a\") " pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.586497 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88ft6\" (UniqueName: \"kubernetes.io/projected/f5af71cb-2380-4977-9a44-ece13d4ce18a-kube-api-access-88ft6\") pod \"swift-proxy-655dd9ff5-m4vn6\" (UID: \"f5af71cb-2380-4977-9a44-ece13d4ce18a\") " pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.586670 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5af71cb-2380-4977-9a44-ece13d4ce18a-run-httpd\") pod \"swift-proxy-655dd9ff5-m4vn6\" (UID: \"f5af71cb-2380-4977-9a44-ece13d4ce18a\") " pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.586774 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5af71cb-2380-4977-9a44-ece13d4ce18a-log-httpd\") pod \"swift-proxy-655dd9ff5-m4vn6\" (UID: \"f5af71cb-2380-4977-9a44-ece13d4ce18a\") " pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.586802 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5af71cb-2380-4977-9a44-ece13d4ce18a-internal-tls-certs\") pod \"swift-proxy-655dd9ff5-m4vn6\" (UID: \"f5af71cb-2380-4977-9a44-ece13d4ce18a\") " pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.586875 4838 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6f4a7ccd-2447-42ca-8ef8-8a7823cdf679-config-volume\") on node \"crc\" DevicePath \"\""
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.586891 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lccbk\" (UniqueName: \"kubernetes.io/projected/6f4a7ccd-2447-42ca-8ef8-8a7823cdf679-kube-api-access-lccbk\") on node \"crc\" DevicePath \"\""
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.586903 4838 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6f4a7ccd-2447-42ca-8ef8-8a7823cdf679-secret-volume\") on node \"crc\" DevicePath \"\""
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.688739 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5af71cb-2380-4977-9a44-ece13d4ce18a-run-httpd\") pod \"swift-proxy-655dd9ff5-m4vn6\" (UID: \"f5af71cb-2380-4977-9a44-ece13d4ce18a\") " pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.689164 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5af71cb-2380-4977-9a44-ece13d4ce18a-log-httpd\") pod \"swift-proxy-655dd9ff5-m4vn6\" (UID: \"f5af71cb-2380-4977-9a44-ece13d4ce18a\") " pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.689191 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5af71cb-2380-4977-9a44-ece13d4ce18a-internal-tls-certs\") pod \"swift-proxy-655dd9ff5-m4vn6\" (UID: \"f5af71cb-2380-4977-9a44-ece13d4ce18a\") " pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.689216 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5af71cb-2380-4977-9a44-ece13d4ce18a-public-tls-certs\") pod \"swift-proxy-655dd9ff5-m4vn6\" (UID: \"f5af71cb-2380-4977-9a44-ece13d4ce18a\") " pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.689252 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5af71cb-2380-4977-9a44-ece13d4ce18a-config-data\") pod \"swift-proxy-655dd9ff5-m4vn6\" (UID: \"f5af71cb-2380-4977-9a44-ece13d4ce18a\") " pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.689282 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5af71cb-2380-4977-9a44-ece13d4ce18a-combined-ca-bundle\") pod \"swift-proxy-655dd9ff5-m4vn6\" (UID: \"f5af71cb-2380-4977-9a44-ece13d4ce18a\") " pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.689372 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f5af71cb-2380-4977-9a44-ece13d4ce18a-etc-swift\") pod \"swift-proxy-655dd9ff5-m4vn6\" (UID: \"f5af71cb-2380-4977-9a44-ece13d4ce18a\") " pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.689397 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88ft6\" (UniqueName: \"kubernetes.io/projected/f5af71cb-2380-4977-9a44-ece13d4ce18a-kube-api-access-88ft6\") pod \"swift-proxy-655dd9ff5-m4vn6\" (UID: \"f5af71cb-2380-4977-9a44-ece13d4ce18a\") " pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.690058 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5af71cb-2380-4977-9a44-ece13d4ce18a-run-httpd\") pod \"swift-proxy-655dd9ff5-m4vn6\" (UID: \"f5af71cb-2380-4977-9a44-ece13d4ce18a\") " pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.690413 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5af71cb-2380-4977-9a44-ece13d4ce18a-log-httpd\") pod \"swift-proxy-655dd9ff5-m4vn6\" (UID: \"f5af71cb-2380-4977-9a44-ece13d4ce18a\") " pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.693371 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5af71cb-2380-4977-9a44-ece13d4ce18a-internal-tls-certs\") pod \"swift-proxy-655dd9ff5-m4vn6\" (UID: \"f5af71cb-2380-4977-9a44-ece13d4ce18a\") " pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.693980 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5af71cb-2380-4977-9a44-ece13d4ce18a-combined-ca-bundle\") pod \"swift-proxy-655dd9ff5-m4vn6\" (UID: \"f5af71cb-2380-4977-9a44-ece13d4ce18a\") " pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.702713 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5af71cb-2380-4977-9a44-ece13d4ce18a-config-data\") pod \"swift-proxy-655dd9ff5-m4vn6\" (UID: \"f5af71cb-2380-4977-9a44-ece13d4ce18a\") " pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.704128 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f5af71cb-2380-4977-9a44-ece13d4ce18a-etc-swift\") pod \"swift-proxy-655dd9ff5-m4vn6\" (UID: \"f5af71cb-2380-4977-9a44-ece13d4ce18a\") " pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.712822 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5af71cb-2380-4977-9a44-ece13d4ce18a-public-tls-certs\") pod \"swift-proxy-655dd9ff5-m4vn6\" (UID: \"f5af71cb-2380-4977-9a44-ece13d4ce18a\") " pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.718854 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88ft6\" (UniqueName: \"kubernetes.io/projected/f5af71cb-2380-4977-9a44-ece13d4ce18a-kube-api-access-88ft6\") pod \"swift-proxy-655dd9ff5-m4vn6\" (UID: \"f5af71cb-2380-4977-9a44-ece13d4ce18a\") " pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.729424 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:07 crc kubenswrapper[4838]: I0202 11:15:07.998666 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ddd3c545-36ba-4568-9f0b-621669c69e2a","Type":"ContainerStarted","Data":"bf920d99ec148de4aca59ce62322cab6463f32d19945cedb7468e2d3ab6204a1"}
Feb 02 11:15:08 crc kubenswrapper[4838]: I0202 11:15:08.000389 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500515-zpqx2" event={"ID":"6f4a7ccd-2447-42ca-8ef8-8a7823cdf679","Type":"ContainerDied","Data":"5f3a80b219ebe999ef2ca994822789e1fe272cfb6f386b62efe18b27470b3038"}
Feb 02 11:15:08 crc kubenswrapper[4838]: I0202 11:15:08.000438 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f3a80b219ebe999ef2ca994822789e1fe272cfb6f386b62efe18b27470b3038"
Feb 02 11:15:08 crc kubenswrapper[4838]: I0202 11:15:08.000504 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500515-zpqx2"
Feb 02 11:15:08 crc kubenswrapper[4838]: I0202 11:15:08.064028 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-655dd9ff5-m4vn6"]
Feb 02 11:15:08 crc kubenswrapper[4838]: W0202 11:15:08.066542 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf5af71cb_2380_4977_9a44_ece13d4ce18a.slice/crio-9f93108a9ea5ce18ec0edac9ebc80aaca6a567fb00f47983bb9bd235536569ac WatchSource:0}: Error finding container 9f93108a9ea5ce18ec0edac9ebc80aaca6a567fb00f47983bb9bd235536569ac: Status 404 returned error can't find the container with id 9f93108a9ea5ce18ec0edac9ebc80aaca6a567fb00f47983bb9bd235536569ac
Feb 02 11:15:09 crc kubenswrapper[4838]: I0202 11:15:09.010053 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-655dd9ff5-m4vn6" event={"ID":"f5af71cb-2380-4977-9a44-ece13d4ce18a","Type":"ContainerStarted","Data":"ce05c4f8b58a57aa5c6ddc35130474c37dd1762588e2d5560e2051a966f7915c"}
Feb 02 11:15:09 crc kubenswrapper[4838]: I0202 11:15:09.010337 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-655dd9ff5-m4vn6" event={"ID":"f5af71cb-2380-4977-9a44-ece13d4ce18a","Type":"ContainerStarted","Data":"c67016308db82d504e414f28c92f22bfd9254beaec6c6765de30aa5734c7ab4c"}
Feb 02 11:15:09 crc kubenswrapper[4838]: I0202 11:15:09.010347 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-655dd9ff5-m4vn6" event={"ID":"f5af71cb-2380-4977-9a44-ece13d4ce18a","Type":"ContainerStarted","Data":"9f93108a9ea5ce18ec0edac9ebc80aaca6a567fb00f47983bb9bd235536569ac"}
Feb 02 11:15:09 crc kubenswrapper[4838]: I0202 11:15:09.011442 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:09 crc kubenswrapper[4838]: I0202 11:15:09.011485 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:09 crc kubenswrapper[4838]: I0202 11:15:09.040339 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-655dd9ff5-m4vn6" podStartSLOduration=2.04032237 podStartE2EDuration="2.04032237s" podCreationTimestamp="2026-02-02 11:15:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:15:09.031340523 +0000 UTC m=+1303.368441551" watchObservedRunningTime="2026-02-02 11:15:09.04032237 +0000 UTC m=+1303.377423388"
Feb 02 11:15:15 crc kubenswrapper[4838]: I0202 11:15:15.068136 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ddd3c545-36ba-4568-9f0b-621669c69e2a","Type":"ContainerStarted","Data":"761a025abada7673af5030314a33e8aac53938f9b06f9b97382234412107e246"}
Feb 02 11:15:16 crc kubenswrapper[4838]: I0202 11:15:16.080057 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Feb 02 11:15:16 crc kubenswrapper[4838]: I0202 11:15:16.118198 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.312228271 podStartE2EDuration="46.11817033s" podCreationTimestamp="2026-02-02 11:14:30 +0000 UTC" firstStartedPulling="2026-02-02 11:14:30.813254853 +0000 UTC m=+1265.150355881" lastFinishedPulling="2026-02-02 11:15:14.619196912 +0000 UTC m=+1308.956297940" observedRunningTime="2026-02-02 11:15:16.107728574 +0000 UTC m=+1310.444829632" watchObservedRunningTime="2026-02-02 11:15:16.11817033 +0000 UTC m=+1310.455271358"
Feb 02 11:15:17 crc kubenswrapper[4838]: I0202 11:15:17.736427 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:17 crc kubenswrapper[4838]: I0202 11:15:17.740394 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-655dd9ff5-m4vn6"
Feb 02 11:15:22 crc kubenswrapper[4838]: I0202 11:15:22.132392 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406","Type":"ContainerStarted","Data":"420260da96aee9023ab4b18232f394b0854fdd30b01b69325bbb93053ed7ff41"}
Feb 02 11:15:22 crc kubenswrapper[4838]: I0202 11:15:22.170273 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.381796602 podStartE2EDuration="55.170248629s" podCreationTimestamp="2026-02-02 11:14:27 +0000 UTC" firstStartedPulling="2026-02-02 11:14:27.99287455 +0000 UTC m=+1262.329975578" lastFinishedPulling="2026-02-02 11:15:20.781326577 +0000 UTC m=+1315.118427605" observedRunningTime="2026-02-02 11:15:22.158121049 +0000 UTC m=+1316.495222097" watchObservedRunningTime="2026-02-02 11:15:22.170248629 +0000 UTC m=+1316.507349657"
Feb 02 11:15:23 crc kubenswrapper[4838]: I0202 11:15:23.961723 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Feb 02 11:15:23 crc kubenswrapper[4838]: I0202 11:15:23.963779 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ddd3c545-36ba-4568-9f0b-621669c69e2a" containerName="ceilometer-central-agent" containerID="cri-o://318e660dfb9201c80432337a904a8ccfcc05737d11a03ebc12ae6a1a04174aeb" gracePeriod=30
Feb 02 11:15:23 crc kubenswrapper[4838]: I0202 11:15:23.963815 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ddd3c545-36ba-4568-9f0b-621669c69e2a" containerName="sg-core" containerID="cri-o://bf920d99ec148de4aca59ce62322cab6463f32d19945cedb7468e2d3ab6204a1" gracePeriod=30
Feb 02 11:15:23 crc kubenswrapper[4838]: I0202 11:15:23.964150 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ddd3c545-36ba-4568-9f0b-621669c69e2a" containerName="proxy-httpd" containerID="cri-o://761a025abada7673af5030314a33e8aac53938f9b06f9b97382234412107e246" gracePeriod=30
Feb 02 11:15:23 crc kubenswrapper[4838]: I0202 11:15:23.963912 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ddd3c545-36ba-4568-9f0b-621669c69e2a" containerName="ceilometer-notification-agent" containerID="cri-o://c0d82f2c7dfc1485a2d9d698b08cf15752144c1a21cca839b6cc77c6942d124e" gracePeriod=30
Feb 02 11:15:24 crc kubenswrapper[4838]: I0202 11:15:24.153718 4838 generic.go:334] "Generic (PLEG): container finished" podID="ddd3c545-36ba-4568-9f0b-621669c69e2a" containerID="761a025abada7673af5030314a33e8aac53938f9b06f9b97382234412107e246" exitCode=0
Feb 02 11:15:24 crc kubenswrapper[4838]: I0202 11:15:24.153753 4838 generic.go:334] "Generic (PLEG): container finished" podID="ddd3c545-36ba-4568-9f0b-621669c69e2a" containerID="bf920d99ec148de4aca59ce62322cab6463f32d19945cedb7468e2d3ab6204a1" exitCode=2
Feb 02 11:15:24 crc kubenswrapper[4838]: I0202 11:15:24.153771 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ddd3c545-36ba-4568-9f0b-621669c69e2a","Type":"ContainerDied","Data":"761a025abada7673af5030314a33e8aac53938f9b06f9b97382234412107e246"}
Feb 02 11:15:24 crc kubenswrapper[4838]: I0202 11:15:24.153794 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ddd3c545-36ba-4568-9f0b-621669c69e2a","Type":"ContainerDied","Data":"bf920d99ec148de4aca59ce62322cab6463f32d19945cedb7468e2d3ab6204a1"}
Feb 02 11:15:25 crc kubenswrapper[4838]: I0202 11:15:25.164289 4838 generic.go:334] "Generic (PLEG): container finished" podID="ddd3c545-36ba-4568-9f0b-621669c69e2a" containerID="318e660dfb9201c80432337a904a8ccfcc05737d11a03ebc12ae6a1a04174aeb" exitCode=0
Feb 02 11:15:25 crc kubenswrapper[4838]: I0202 11:15:25.164349 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ddd3c545-36ba-4568-9f0b-621669c69e2a","Type":"ContainerDied","Data":"318e660dfb9201c80432337a904a8ccfcc05737d11a03ebc12ae6a1a04174aeb"}
Feb 02 11:15:26 crc kubenswrapper[4838]: I0202 11:15:26.597373 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:15:26 crc kubenswrapper[4838]: I0202 11:15:26.613017 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-b4ffff5fb-46ldv"
Feb 02 11:15:28 crc kubenswrapper[4838]: I0202 11:15:28.814040 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 02 11:15:28 crc kubenswrapper[4838]: I0202 11:15:28.891681 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddd3c545-36ba-4568-9f0b-621669c69e2a-combined-ca-bundle\") pod \"ddd3c545-36ba-4568-9f0b-621669c69e2a\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") "
Feb 02 11:15:28 crc kubenswrapper[4838]: I0202 11:15:28.891760 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ddd3c545-36ba-4568-9f0b-621669c69e2a-config-data\") pod \"ddd3c545-36ba-4568-9f0b-621669c69e2a\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") "
Feb 02 11:15:28 crc kubenswrapper[4838]: I0202 11:15:28.891965 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ddd3c545-36ba-4568-9f0b-621669c69e2a-run-httpd\") pod \"ddd3c545-36ba-4568-9f0b-621669c69e2a\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") "
Feb 02 11:15:28 crc kubenswrapper[4838]: I0202 11:15:28.891996 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ddd3c545-36ba-4568-9f0b-621669c69e2a-scripts\") pod \"ddd3c545-36ba-4568-9f0b-621669c69e2a\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") "
Feb 02 11:15:28 crc kubenswrapper[4838]: I0202 11:15:28.892022 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6nbs6\" (UniqueName: \"kubernetes.io/projected/ddd3c545-36ba-4568-9f0b-621669c69e2a-kube-api-access-6nbs6\") pod \"ddd3c545-36ba-4568-9f0b-621669c69e2a\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") "
Feb 02 11:15:28 crc kubenswrapper[4838]: I0202 11:15:28.892060 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ddd3c545-36ba-4568-9f0b-621669c69e2a-log-httpd\") pod \"ddd3c545-36ba-4568-9f0b-621669c69e2a\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") "
Feb 02 11:15:28 crc kubenswrapper[4838]: I0202 11:15:28.892125 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ddd3c545-36ba-4568-9f0b-621669c69e2a-sg-core-conf-yaml\") pod \"ddd3c545-36ba-4568-9f0b-621669c69e2a\" (UID: \"ddd3c545-36ba-4568-9f0b-621669c69e2a\") "
Feb 02 11:15:28 crc kubenswrapper[4838]: I0202 11:15:28.892855 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ddd3c545-36ba-4568-9f0b-621669c69e2a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ddd3c545-36ba-4568-9f0b-621669c69e2a" (UID: "ddd3c545-36ba-4568-9f0b-621669c69e2a"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:15:28 crc kubenswrapper[4838]: I0202 11:15:28.892996 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ddd3c545-36ba-4568-9f0b-621669c69e2a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ddd3c545-36ba-4568-9f0b-621669c69e2a" (UID: "ddd3c545-36ba-4568-9f0b-621669c69e2a"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:15:28 crc kubenswrapper[4838]: I0202 11:15:28.893432 4838 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ddd3c545-36ba-4568-9f0b-621669c69e2a-run-httpd\") on node \"crc\" DevicePath \"\""
Feb 02 11:15:28 crc kubenswrapper[4838]: I0202 11:15:28.893463 4838 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ddd3c545-36ba-4568-9f0b-621669c69e2a-log-httpd\") on node \"crc\" DevicePath \"\""
Feb 02 11:15:28 crc kubenswrapper[4838]: I0202 11:15:28.897994 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddd3c545-36ba-4568-9f0b-621669c69e2a-kube-api-access-6nbs6" (OuterVolumeSpecName: "kube-api-access-6nbs6") pod "ddd3c545-36ba-4568-9f0b-621669c69e2a" (UID: "ddd3c545-36ba-4568-9f0b-621669c69e2a"). InnerVolumeSpecName "kube-api-access-6nbs6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:15:28 crc kubenswrapper[4838]: I0202 11:15:28.898712 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddd3c545-36ba-4568-9f0b-621669c69e2a-scripts" (OuterVolumeSpecName: "scripts") pod "ddd3c545-36ba-4568-9f0b-621669c69e2a" (UID: "ddd3c545-36ba-4568-9f0b-621669c69e2a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:15:28 crc kubenswrapper[4838]: I0202 11:15:28.952883 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddd3c545-36ba-4568-9f0b-621669c69e2a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ddd3c545-36ba-4568-9f0b-621669c69e2a" (UID: "ddd3c545-36ba-4568-9f0b-621669c69e2a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:15:28 crc kubenswrapper[4838]: I0202 11:15:28.975856 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddd3c545-36ba-4568-9f0b-621669c69e2a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ddd3c545-36ba-4568-9f0b-621669c69e2a" (UID: "ddd3c545-36ba-4568-9f0b-621669c69e2a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:15:28 crc kubenswrapper[4838]: I0202 11:15:28.994942 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ddd3c545-36ba-4568-9f0b-621669c69e2a-scripts\") on node \"crc\" DevicePath \"\""
Feb 02 11:15:28 crc kubenswrapper[4838]: I0202 11:15:28.994972 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6nbs6\" (UniqueName: \"kubernetes.io/projected/ddd3c545-36ba-4568-9f0b-621669c69e2a-kube-api-access-6nbs6\") on node \"crc\" DevicePath \"\""
Feb 02 11:15:28 crc kubenswrapper[4838]: I0202 11:15:28.994983 4838 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ddd3c545-36ba-4568-9f0b-621669c69e2a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Feb 02 11:15:28 crc kubenswrapper[4838]: I0202 11:15:28.994991 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddd3c545-36ba-4568-9f0b-621669c69e2a-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.000480 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddd3c545-36ba-4568-9f0b-621669c69e2a-config-data" (OuterVolumeSpecName: "config-data") pod "ddd3c545-36ba-4568-9f0b-621669c69e2a" (UID: "ddd3c545-36ba-4568-9f0b-621669c69e2a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.096793 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ddd3c545-36ba-4568-9f0b-621669c69e2a-config-data\") on node \"crc\" DevicePath \"\""
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.211886 4838 generic.go:334] "Generic (PLEG): container finished" podID="ddd3c545-36ba-4568-9f0b-621669c69e2a" containerID="c0d82f2c7dfc1485a2d9d698b08cf15752144c1a21cca839b6cc77c6942d124e" exitCode=0
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.211953 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ddd3c545-36ba-4568-9f0b-621669c69e2a","Type":"ContainerDied","Data":"c0d82f2c7dfc1485a2d9d698b08cf15752144c1a21cca839b6cc77c6942d124e"}
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.211999 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.212019 4838 scope.go:117] "RemoveContainer" containerID="761a025abada7673af5030314a33e8aac53938f9b06f9b97382234412107e246"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.212007 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ddd3c545-36ba-4568-9f0b-621669c69e2a","Type":"ContainerDied","Data":"a170a95e892f52ade3eff686fccb1c2d0dfbee84a6e2c528ea902136c42e007c"}
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.231042 4838 scope.go:117] "RemoveContainer" containerID="bf920d99ec148de4aca59ce62322cab6463f32d19945cedb7468e2d3ab6204a1"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.242917 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.265003 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.276771 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Feb 02 11:15:29 crc kubenswrapper[4838]: E0202 11:15:29.277493 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddd3c545-36ba-4568-9f0b-621669c69e2a" containerName="ceilometer-notification-agent"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.277517 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddd3c545-36ba-4568-9f0b-621669c69e2a" containerName="ceilometer-notification-agent"
Feb 02 11:15:29 crc kubenswrapper[4838]: E0202 11:15:29.277538 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddd3c545-36ba-4568-9f0b-621669c69e2a" containerName="ceilometer-central-agent"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.277545 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddd3c545-36ba-4568-9f0b-621669c69e2a" containerName="ceilometer-central-agent"
Feb 02 11:15:29 crc kubenswrapper[4838]: E0202 11:15:29.277558 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddd3c545-36ba-4568-9f0b-621669c69e2a" containerName="proxy-httpd"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.277569 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddd3c545-36ba-4568-9f0b-621669c69e2a" containerName="proxy-httpd"
Feb 02 11:15:29 crc kubenswrapper[4838]: E0202 11:15:29.277587 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddd3c545-36ba-4568-9f0b-621669c69e2a" containerName="sg-core"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.277593 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddd3c545-36ba-4568-9f0b-621669c69e2a" containerName="sg-core"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.279400 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddd3c545-36ba-4568-9f0b-621669c69e2a" containerName="sg-core"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.279480 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddd3c545-36ba-4568-9f0b-621669c69e2a" containerName="ceilometer-central-agent"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.279518 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddd3c545-36ba-4568-9f0b-621669c69e2a" containerName="ceilometer-notification-agent"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.279556 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddd3c545-36ba-4568-9f0b-621669c69e2a" containerName="proxy-httpd"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.285878 4838 scope.go:117] "RemoveContainer" containerID="c0d82f2c7dfc1485a2d9d698b08cf15752144c1a21cca839b6cc77c6942d124e"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.286550 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.290493 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.292848 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.294908 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.332389 4838 scope.go:117] "RemoveContainer" containerID="318e660dfb9201c80432337a904a8ccfcc05737d11a03ebc12ae6a1a04174aeb"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.354438 4838 scope.go:117] "RemoveContainer" containerID="761a025abada7673af5030314a33e8aac53938f9b06f9b97382234412107e246"
Feb 02 11:15:29 crc kubenswrapper[4838]: E0202 11:15:29.354952 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"761a025abada7673af5030314a33e8aac53938f9b06f9b97382234412107e246\": container with ID starting with 761a025abada7673af5030314a33e8aac53938f9b06f9b97382234412107e246 not found: ID does not exist" containerID="761a025abada7673af5030314a33e8aac53938f9b06f9b97382234412107e246"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.354986 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"761a025abada7673af5030314a33e8aac53938f9b06f9b97382234412107e246"} err="failed to get container status \"761a025abada7673af5030314a33e8aac53938f9b06f9b97382234412107e246\": rpc error: code = NotFound desc = could not find container \"761a025abada7673af5030314a33e8aac53938f9b06f9b97382234412107e246\": container with ID starting with 761a025abada7673af5030314a33e8aac53938f9b06f9b97382234412107e246 not found: ID does not exist"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.355012 4838 scope.go:117] "RemoveContainer" containerID="bf920d99ec148de4aca59ce62322cab6463f32d19945cedb7468e2d3ab6204a1"
Feb 02 11:15:29 crc kubenswrapper[4838]: E0202 11:15:29.355446 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf920d99ec148de4aca59ce62322cab6463f32d19945cedb7468e2d3ab6204a1\": container with ID starting with bf920d99ec148de4aca59ce62322cab6463f32d19945cedb7468e2d3ab6204a1 not found: ID does not exist" containerID="bf920d99ec148de4aca59ce62322cab6463f32d19945cedb7468e2d3ab6204a1"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.355466 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf920d99ec148de4aca59ce62322cab6463f32d19945cedb7468e2d3ab6204a1"} err="failed to get container status \"bf920d99ec148de4aca59ce62322cab6463f32d19945cedb7468e2d3ab6204a1\": rpc error: code = NotFound desc = could not find container \"bf920d99ec148de4aca59ce62322cab6463f32d19945cedb7468e2d3ab6204a1\": container with ID starting with bf920d99ec148de4aca59ce62322cab6463f32d19945cedb7468e2d3ab6204a1 not found: ID does not exist"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.355481 4838 scope.go:117] "RemoveContainer" containerID="c0d82f2c7dfc1485a2d9d698b08cf15752144c1a21cca839b6cc77c6942d124e"
Feb 02 11:15:29 crc kubenswrapper[4838]: E0202 11:15:29.355789 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0d82f2c7dfc1485a2d9d698b08cf15752144c1a21cca839b6cc77c6942d124e\": container with ID starting with c0d82f2c7dfc1485a2d9d698b08cf15752144c1a21cca839b6cc77c6942d124e not found: ID does not exist" containerID="c0d82f2c7dfc1485a2d9d698b08cf15752144c1a21cca839b6cc77c6942d124e"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.355807 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0d82f2c7dfc1485a2d9d698b08cf15752144c1a21cca839b6cc77c6942d124e"} err="failed to get container status \"c0d82f2c7dfc1485a2d9d698b08cf15752144c1a21cca839b6cc77c6942d124e\": rpc error: code = NotFound desc = could not find container \"c0d82f2c7dfc1485a2d9d698b08cf15752144c1a21cca839b6cc77c6942d124e\": container with ID starting with c0d82f2c7dfc1485a2d9d698b08cf15752144c1a21cca839b6cc77c6942d124e not found: ID does not exist"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.355823 4838 scope.go:117] "RemoveContainer" containerID="318e660dfb9201c80432337a904a8ccfcc05737d11a03ebc12ae6a1a04174aeb"
Feb 02 11:15:29 crc kubenswrapper[4838]: E0202 11:15:29.356070 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"318e660dfb9201c80432337a904a8ccfcc05737d11a03ebc12ae6a1a04174aeb\": container with ID starting with 318e660dfb9201c80432337a904a8ccfcc05737d11a03ebc12ae6a1a04174aeb not found: ID does not exist" containerID="318e660dfb9201c80432337a904a8ccfcc05737d11a03ebc12ae6a1a04174aeb"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.356089 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"318e660dfb9201c80432337a904a8ccfcc05737d11a03ebc12ae6a1a04174aeb"} err="failed to get container status \"318e660dfb9201c80432337a904a8ccfcc05737d11a03ebc12ae6a1a04174aeb\": rpc error: code = NotFound desc = could not find container \"318e660dfb9201c80432337a904a8ccfcc05737d11a03ebc12ae6a1a04174aeb\": container with ID starting with 318e660dfb9201c80432337a904a8ccfcc05737d11a03ebc12ae6a1a04174aeb not found: ID does not exist"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.400970 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b95b77a-01f4-4847-a86b-d5c8564df612-run-httpd\") pod \"ceilometer-0\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " pod="openstack/ceilometer-0"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.401123 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b95b77a-01f4-4847-a86b-d5c8564df612-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " pod="openstack/ceilometer-0"
Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.401151 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b95b77a-01f4-4847-a86b-d5c8564df612-config-data\") pod \"ceilometer-0\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " pod="openstack/ceilometer-0"
Feb 02 11:15:29
crc kubenswrapper[4838]: I0202 11:15:29.401339 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b95b77a-01f4-4847-a86b-d5c8564df612-log-httpd\") pod \"ceilometer-0\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " pod="openstack/ceilometer-0" Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.401525 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pknc\" (UniqueName: \"kubernetes.io/projected/6b95b77a-01f4-4847-a86b-d5c8564df612-kube-api-access-4pknc\") pod \"ceilometer-0\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " pod="openstack/ceilometer-0" Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.401661 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b95b77a-01f4-4847-a86b-d5c8564df612-scripts\") pod \"ceilometer-0\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " pod="openstack/ceilometer-0" Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.401720 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b95b77a-01f4-4847-a86b-d5c8564df612-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " pod="openstack/ceilometer-0" Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.503376 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b95b77a-01f4-4847-a86b-d5c8564df612-scripts\") pod \"ceilometer-0\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " pod="openstack/ceilometer-0" Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.503449 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b95b77a-01f4-4847-a86b-d5c8564df612-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " pod="openstack/ceilometer-0" Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.503492 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b95b77a-01f4-4847-a86b-d5c8564df612-run-httpd\") pod \"ceilometer-0\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " pod="openstack/ceilometer-0" Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.503567 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b95b77a-01f4-4847-a86b-d5c8564df612-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " pod="openstack/ceilometer-0" Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.503597 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b95b77a-01f4-4847-a86b-d5c8564df612-config-data\") pod \"ceilometer-0\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " pod="openstack/ceilometer-0" Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.503682 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b95b77a-01f4-4847-a86b-d5c8564df612-log-httpd\") pod \"ceilometer-0\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " 
pod="openstack/ceilometer-0" Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.503724 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pknc\" (UniqueName: \"kubernetes.io/projected/6b95b77a-01f4-4847-a86b-d5c8564df612-kube-api-access-4pknc\") pod \"ceilometer-0\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " pod="openstack/ceilometer-0" Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.504241 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b95b77a-01f4-4847-a86b-d5c8564df612-run-httpd\") pod \"ceilometer-0\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " pod="openstack/ceilometer-0" Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.504272 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b95b77a-01f4-4847-a86b-d5c8564df612-log-httpd\") pod \"ceilometer-0\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " pod="openstack/ceilometer-0" Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.507941 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b95b77a-01f4-4847-a86b-d5c8564df612-config-data\") pod \"ceilometer-0\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " pod="openstack/ceilometer-0" Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.508262 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b95b77a-01f4-4847-a86b-d5c8564df612-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " pod="openstack/ceilometer-0" Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.510209 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b95b77a-01f4-4847-a86b-d5c8564df612-scripts\") pod \"ceilometer-0\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " pod="openstack/ceilometer-0" Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.514470 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b95b77a-01f4-4847-a86b-d5c8564df612-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " pod="openstack/ceilometer-0" Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.527394 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pknc\" (UniqueName: \"kubernetes.io/projected/6b95b77a-01f4-4847-a86b-d5c8564df612-kube-api-access-4pknc\") pod \"ceilometer-0\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " pod="openstack/ceilometer-0" Feb 02 11:15:29 crc kubenswrapper[4838]: I0202 11:15:29.604881 4838 util.go:30] "No sandbox for pod can be found. 
Feb 02 11:15:30 crc kubenswrapper[4838]: W0202 11:15:30.064156 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b95b77a_01f4_4847_a86b_d5c8564df612.slice/crio-fdfe40872090be9ef50986ceee44cfea332e63a7f0a97392db5699f067442a1c WatchSource:0}: Error finding container fdfe40872090be9ef50986ceee44cfea332e63a7f0a97392db5699f067442a1c: Status 404 returned error can't find the container with id fdfe40872090be9ef50986ceee44cfea332e63a7f0a97392db5699f067442a1c
Feb 02 11:15:30 crc kubenswrapper[4838]: I0202 11:15:30.069303 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Feb 02 11:15:30 crc kubenswrapper[4838]: I0202 11:15:30.222022 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b95b77a-01f4-4847-a86b-d5c8564df612","Type":"ContainerStarted","Data":"fdfe40872090be9ef50986ceee44cfea332e63a7f0a97392db5699f067442a1c"}
Feb 02 11:15:30 crc kubenswrapper[4838]: I0202 11:15:30.521929 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ddd3c545-36ba-4568-9f0b-621669c69e2a" path="/var/lib/kubelet/pods/ddd3c545-36ba-4568-9f0b-621669c69e2a/volumes"
Feb 02 11:15:35 crc kubenswrapper[4838]: I0202 11:15:35.260452 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b95b77a-01f4-4847-a86b-d5c8564df612","Type":"ContainerStarted","Data":"e9ec1a2d93a4032109413aac85c15fa3595dd572c03937d48199246ac7740224"}
Feb 02 11:15:37 crc kubenswrapper[4838]: I0202 11:15:37.281653 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b95b77a-01f4-4847-a86b-d5c8564df612","Type":"ContainerStarted","Data":"2eaa60014fb589380fcb5b96a3cc14d9d1db9cd572787f7ffdba25d16bb92eb7"}
Feb 02 11:15:42 crc kubenswrapper[4838]: I0202 11:15:42.330516 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b95b77a-01f4-4847-a86b-d5c8564df612","Type":"ContainerStarted","Data":"d14a4e3745ae58787229a175b1945feb68d46aa6aed7089bf2d9a2a18e6dd383"}
Feb 02 11:15:46 crc kubenswrapper[4838]: I0202 11:15:46.172363 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 02 11:15:46 crc kubenswrapper[4838]: I0202 11:15:46.173466 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693" containerName="glance-httpd" containerID="cri-o://7d1e6d3b0d12ec5eacfcb88d9ce1001917c904b0a7bd5656688f8bef53536b25" gracePeriod=30
Feb 02 11:15:46 crc kubenswrapper[4838]: I0202 11:15:46.174085 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693" containerName="glance-log" containerID="cri-o://3d0181f005eaa235a74d3c353269708285f0948e183cd37a5b3b101c16555cc3" gracePeriod=30
Feb 02 11:15:46 crc kubenswrapper[4838]: I0202 11:15:46.365445 4838 generic.go:334] "Generic (PLEG): container finished" podID="b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693" containerID="3d0181f005eaa235a74d3c353269708285f0948e183cd37a5b3b101c16555cc3" exitCode=143
Feb 02 11:15:46 crc kubenswrapper[4838]: I0202 11:15:46.365497 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693","Type":"ContainerDied","Data":"3d0181f005eaa235a74d3c353269708285f0948e183cd37a5b3b101c16555cc3"}
Feb 02 11:15:48 crc kubenswrapper[4838]: I0202 11:15:48.392822 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b95b77a-01f4-4847-a86b-d5c8564df612","Type":"ContainerStarted","Data":"85e8d418e9aca743b463c26d63f445c6079b099d946e2eb61f58250945e69897"}
Feb 02 11:15:48 crc kubenswrapper[4838]: I0202 11:15:48.393822 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Feb 02 11:15:48 crc kubenswrapper[4838]: I0202 11:15:48.421723 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.954178641 podStartE2EDuration="19.421705958s" podCreationTimestamp="2026-02-02 11:15:29 +0000 UTC" firstStartedPulling="2026-02-02 11:15:30.066046744 +0000 UTC m=+1324.403147772" lastFinishedPulling="2026-02-02 11:15:47.533574061 +0000 UTC m=+1341.870675089" observedRunningTime="2026-02-02 11:15:48.411835348 +0000 UTC m=+1342.748936396" watchObservedRunningTime="2026-02-02 11:15:48.421705958 +0000 UTC m=+1342.758806986"
Feb 02 11:15:48 crc kubenswrapper[4838]: I0202 11:15:48.986998 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Feb 02 11:15:48 crc kubenswrapper[4838]: I0202 11:15:48.987258 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="785718c1-d706-4e71-8250-77f8326207d6" containerName="glance-log" containerID="cri-o://cd8ce6306c3129ac1384b3de5bee70d6589d86293795dd91eaabda36bf5587e2" gracePeriod=30
Feb 02 11:15:48 crc kubenswrapper[4838]: I0202 11:15:48.987402 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="785718c1-d706-4e71-8250-77f8326207d6" containerName="glance-httpd" containerID="cri-o://cb31ed3f580613dfeda4c2802d70297df8ef3ac8bcbeeb274398e529a1d999be" gracePeriod=30
Feb 02 11:15:49 crc kubenswrapper[4838]: I0202 11:15:49.197610 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Feb 02 11:15:49 crc kubenswrapper[4838]: I0202 11:15:49.402225 4838 generic.go:334] "Generic (PLEG): container finished" podID="b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693" containerID="7d1e6d3b0d12ec5eacfcb88d9ce1001917c904b0a7bd5656688f8bef53536b25" exitCode=0
Feb 02 11:15:49 crc kubenswrapper[4838]: I0202 11:15:49.402303 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693","Type":"ContainerDied","Data":"7d1e6d3b0d12ec5eacfcb88d9ce1001917c904b0a7bd5656688f8bef53536b25"}
Feb 02 11:15:49 crc kubenswrapper[4838]: I0202 11:15:49.404781 4838 generic.go:334] "Generic (PLEG): container finished" podID="785718c1-d706-4e71-8250-77f8326207d6" containerID="cd8ce6306c3129ac1384b3de5bee70d6589d86293795dd91eaabda36bf5587e2" exitCode=143
Feb 02 11:15:49 crc kubenswrapper[4838]: I0202 11:15:49.404840 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"785718c1-d706-4e71-8250-77f8326207d6","Type":"ContainerDied","Data":"cd8ce6306c3129ac1384b3de5bee70d6589d86293795dd91eaabda36bf5587e2"}
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.128876 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.182158 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") "
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.182267 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-logs\") pod \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") "
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.182344 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-scripts\") pod \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") "
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.182363 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-config-data\") pod \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") "
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.182392 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-public-tls-certs\") pod \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") "
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.182470 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ltvvb\" (UniqueName: \"kubernetes.io/projected/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-kube-api-access-ltvvb\") pod \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") "
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.182503 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-combined-ca-bundle\") pod \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") "
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.182538 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-httpd-run\") pod \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\" (UID: \"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693\") "
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.183324 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693" (UID: "b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.183694 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-logs" (OuterVolumeSpecName: "logs") pod "b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693" (UID: "b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.191574 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693" (UID: "b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.192410 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-kube-api-access-ltvvb" (OuterVolumeSpecName: "kube-api-access-ltvvb") pod "b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693" (UID: "b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693"). InnerVolumeSpecName "kube-api-access-ltvvb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.211934 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-scripts" (OuterVolumeSpecName: "scripts") pod "b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693" (UID: "b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.240986 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693" (UID: "b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.256753 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-config-data" (OuterVolumeSpecName: "config-data") pod "b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693" (UID: "b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.272753 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693" (UID: "b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.283990 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ltvvb\" (UniqueName: \"kubernetes.io/projected/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-kube-api-access-ltvvb\") on node \"crc\" DevicePath \"\""
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.284027 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.284037 4838 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-httpd-run\") on node \"crc\" DevicePath \"\""
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.284064 4838 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" "
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.284075 4838 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-logs\") on node \"crc\" DevicePath \"\""
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.284085 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-scripts\") on node \"crc\" DevicePath \"\""
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.284096 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-config-data\") on node \"crc\" DevicePath \"\""
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.284103 4838 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693-public-tls-certs\") on node \"crc\" DevicePath \"\""
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.307766 4838 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.386027 4838 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\""
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.418601 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693","Type":"ContainerDied","Data":"86a5e85813365269f41d680d587b76f662614b4771f4a557e8318cddf2156f77"}
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.418647 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.418671 4838 scope.go:117] "RemoveContainer" containerID="7d1e6d3b0d12ec5eacfcb88d9ce1001917c904b0a7bd5656688f8bef53536b25"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.418788 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6b95b77a-01f4-4847-a86b-d5c8564df612" containerName="ceilometer-central-agent" containerID="cri-o://e9ec1a2d93a4032109413aac85c15fa3595dd572c03937d48199246ac7740224" gracePeriod=30
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.418851 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6b95b77a-01f4-4847-a86b-d5c8564df612" containerName="sg-core" containerID="cri-o://d14a4e3745ae58787229a175b1945feb68d46aa6aed7089bf2d9a2a18e6dd383" gracePeriod=30
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.418880 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6b95b77a-01f4-4847-a86b-d5c8564df612" containerName="proxy-httpd" containerID="cri-o://85e8d418e9aca743b463c26d63f445c6079b099d946e2eb61f58250945e69897" gracePeriod=30
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.418851 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6b95b77a-01f4-4847-a86b-d5c8564df612" containerName="ceilometer-notification-agent" containerID="cri-o://2eaa60014fb589380fcb5b96a3cc14d9d1db9cd572787f7ffdba25d16bb92eb7" gracePeriod=30
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.447645 4838 scope.go:117] "RemoveContainer" containerID="3d0181f005eaa235a74d3c353269708285f0948e183cd37a5b3b101c16555cc3"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.451543 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.465416 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.485135 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 02 11:15:50 crc kubenswrapper[4838]: E0202 11:15:50.485544 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693" containerName="glance-httpd"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.485564 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693" containerName="glance-httpd"
Feb 02 11:15:50 crc kubenswrapper[4838]: E0202 11:15:50.485592 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693" containerName="glance-log"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.485601 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693" containerName="glance-log"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.485825 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693" containerName="glance-log"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.485896 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693" containerName="glance-httpd"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.486919 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.490210 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.490248 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.538452 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693" path="/var/lib/kubelet/pods/b3adf1bb-98e2-4ab3-bd65-ac4b7b32c693/volumes"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.544667 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.590250 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5110c446-0e66-4098-b30a-dfbdbc8e5fbe-scripts\") pod \"glance-default-external-api-0\" (UID: \"5110c446-0e66-4098-b30a-dfbdbc8e5fbe\") " pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.590738 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcgsn\" (UniqueName: \"kubernetes.io/projected/5110c446-0e66-4098-b30a-dfbdbc8e5fbe-kube-api-access-zcgsn\") pod \"glance-default-external-api-0\" (UID: \"5110c446-0e66-4098-b30a-dfbdbc8e5fbe\") " pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.591476 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"5110c446-0e66-4098-b30a-dfbdbc8e5fbe\") " pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.591503 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5110c446-0e66-4098-b30a-dfbdbc8e5fbe-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5110c446-0e66-4098-b30a-dfbdbc8e5fbe\") " pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.592468 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5110c446-0e66-4098-b30a-dfbdbc8e5fbe-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5110c446-0e66-4098-b30a-dfbdbc8e5fbe\") " pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.592575 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5110c446-0e66-4098-b30a-dfbdbc8e5fbe-logs\") pod \"glance-default-external-api-0\" (UID: \"5110c446-0e66-4098-b30a-dfbdbc8e5fbe\") " pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.592630 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5110c446-0e66-4098-b30a-dfbdbc8e5fbe-config-data\") pod \"glance-default-external-api-0\" (UID: \"5110c446-0e66-4098-b30a-dfbdbc8e5fbe\") " pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.592653 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5110c446-0e66-4098-b30a-dfbdbc8e5fbe-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5110c446-0e66-4098-b30a-dfbdbc8e5fbe\") " pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.693850 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"5110c446-0e66-4098-b30a-dfbdbc8e5fbe\") " pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.693901 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5110c446-0e66-4098-b30a-dfbdbc8e5fbe-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5110c446-0e66-4098-b30a-dfbdbc8e5fbe\") " pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.694043 4838 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"5110c446-0e66-4098-b30a-dfbdbc8e5fbe\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.694530 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5110c446-0e66-4098-b30a-dfbdbc8e5fbe-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5110c446-0e66-4098-b30a-dfbdbc8e5fbe\") " pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.694785 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5110c446-0e66-4098-b30a-dfbdbc8e5fbe-logs\") pod \"glance-default-external-api-0\" (UID: \"5110c446-0e66-4098-b30a-dfbdbc8e5fbe\") " pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.694886 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5110c446-0e66-4098-b30a-dfbdbc8e5fbe-config-data\") pod \"glance-default-external-api-0\" (UID: \"5110c446-0e66-4098-b30a-dfbdbc8e5fbe\") " pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.694960 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5110c446-0e66-4098-b30a-dfbdbc8e5fbe-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5110c446-0e66-4098-b30a-dfbdbc8e5fbe\") " pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.695136 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5110c446-0e66-4098-b30a-dfbdbc8e5fbe-scripts\") pod \"glance-default-external-api-0\" (UID: \"5110c446-0e66-4098-b30a-dfbdbc8e5fbe\") " pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.695199 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcgsn\" (UniqueName: \"kubernetes.io/projected/5110c446-0e66-4098-b30a-dfbdbc8e5fbe-kube-api-access-zcgsn\") pod \"glance-default-external-api-0\" (UID: \"5110c446-0e66-4098-b30a-dfbdbc8e5fbe\") " pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.695883 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5110c446-0e66-4098-b30a-dfbdbc8e5fbe-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5110c446-0e66-4098-b30a-dfbdbc8e5fbe\") " pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.696374 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5110c446-0e66-4098-b30a-dfbdbc8e5fbe-logs\") pod \"glance-default-external-api-0\" (UID: \"5110c446-0e66-4098-b30a-dfbdbc8e5fbe\") " pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.699593 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5110c446-0e66-4098-b30a-dfbdbc8e5fbe-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5110c446-0e66-4098-b30a-dfbdbc8e5fbe\") " pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.699640 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5110c446-0e66-4098-b30a-dfbdbc8e5fbe-scripts\") pod \"glance-default-external-api-0\" (UID: \"5110c446-0e66-4098-b30a-dfbdbc8e5fbe\") " pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.699730 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5110c446-0e66-4098-b30a-dfbdbc8e5fbe-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5110c446-0e66-4098-b30a-dfbdbc8e5fbe\") " pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.700228 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5110c446-0e66-4098-b30a-dfbdbc8e5fbe-config-data\") pod \"glance-default-external-api-0\" (UID: \"5110c446-0e66-4098-b30a-dfbdbc8e5fbe\") " pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.712462 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcgsn\" (UniqueName: \"kubernetes.io/projected/5110c446-0e66-4098-b30a-dfbdbc8e5fbe-kube-api-access-zcgsn\") pod \"glance-default-external-api-0\" (UID: \"5110c446-0e66-4098-b30a-dfbdbc8e5fbe\") " pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.727168 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"5110c446-0e66-4098-b30a-dfbdbc8e5fbe\") " pod="openstack/glance-default-external-api-0"
Feb 02 11:15:50 crc kubenswrapper[4838]: I0202 11:15:50.804022 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Feb 02 11:15:51 crc kubenswrapper[4838]: W0202 11:15:51.303066 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5110c446_0e66_4098_b30a_dfbdbc8e5fbe.slice/crio-027dd558a8f6760a6c5c2b2af8c2c0c159b1a0ccdcfd48f8fa975fd45dafe156 WatchSource:0}: Error finding container 027dd558a8f6760a6c5c2b2af8c2c0c159b1a0ccdcfd48f8fa975fd45dafe156: Status 404 returned error can't find the container with id 027dd558a8f6760a6c5c2b2af8c2c0c159b1a0ccdcfd48f8fa975fd45dafe156
Feb 02 11:15:51 crc kubenswrapper[4838]: I0202 11:15:51.306750 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 02 11:15:51 crc kubenswrapper[4838]: I0202 11:15:51.439884 4838 generic.go:334] "Generic (PLEG): container finished" podID="6b95b77a-01f4-4847-a86b-d5c8564df612" containerID="85e8d418e9aca743b463c26d63f445c6079b099d946e2eb61f58250945e69897" exitCode=0
Feb 02 11:15:51 crc kubenswrapper[4838]: I0202 11:15:51.439917 4838 generic.go:334] "Generic (PLEG): container finished" podID="6b95b77a-01f4-4847-a86b-d5c8564df612" containerID="d14a4e3745ae58787229a175b1945feb68d46aa6aed7089bf2d9a2a18e6dd383" exitCode=2
Feb 02 11:15:51 crc kubenswrapper[4838]: I0202 11:15:51.439924 4838 generic.go:334] "Generic (PLEG): container finished" podID="6b95b77a-01f4-4847-a86b-d5c8564df612" containerID="e9ec1a2d93a4032109413aac85c15fa3595dd572c03937d48199246ac7740224" exitCode=0
Feb 02 11:15:51 crc kubenswrapper[4838]: I0202 11:15:51.439963 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b95b77a-01f4-4847-a86b-d5c8564df612","Type":"ContainerDied","Data":"85e8d418e9aca743b463c26d63f445c6079b099d946e2eb61f58250945e69897"}
Feb 02 11:15:51 crc kubenswrapper[4838]: I0202 11:15:51.439990 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b95b77a-01f4-4847-a86b-d5c8564df612","Type":"ContainerDied","Data":"d14a4e3745ae58787229a175b1945feb68d46aa6aed7089bf2d9a2a18e6dd383"}
Feb 02 11:15:51 crc kubenswrapper[4838]: I0202 11:15:51.440000 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b95b77a-01f4-4847-a86b-d5c8564df612","Type":"ContainerDied","Data":"e9ec1a2d93a4032109413aac85c15fa3595dd572c03937d48199246ac7740224"}
Feb 02 11:15:51 crc kubenswrapper[4838]: I0202 11:15:51.441517 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5110c446-0e66-4098-b30a-dfbdbc8e5fbe","Type":"ContainerStarted","Data":"027dd558a8f6760a6c5c2b2af8c2c0c159b1a0ccdcfd48f8fa975fd45dafe156"}
Feb 02 11:15:52 crc kubenswrapper[4838]: I0202 11:15:52.453481 4838 generic.go:334] "Generic (PLEG): container finished" podID="785718c1-d706-4e71-8250-77f8326207d6" containerID="cb31ed3f580613dfeda4c2802d70297df8ef3ac8bcbeeb274398e529a1d999be" exitCode=0
Feb 02 11:15:52 crc kubenswrapper[4838]: I0202 11:15:52.454164 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"785718c1-d706-4e71-8250-77f8326207d6","Type":"ContainerDied","Data":"cb31ed3f580613dfeda4c2802d70297df8ef3ac8bcbeeb274398e529a1d999be"}
Feb 02 11:15:52 crc kubenswrapper[4838]: I0202 11:15:52.458497 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5110c446-0e66-4098-b30a-dfbdbc8e5fbe","Type":"ContainerStarted","Data":"5ce6551063dc670c51ac2cd165621dfe7bdfa46b77c29eb20ee5dce491f82a2b"}
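
Note: the exitCode values in the PLEG lines encode how each container stopped. 0 is a clean shutdown; 143 (seen earlier for glance-log) is 128+15, i.e. the process died on the SIGTERM sent by "Killing container with a grace period"; sg-core's 2 is its own error status. Decoding the 128+n convention (illustrative Go):

    package main

    import (
    	"fmt"
    	"syscall"
    )

    // describeExit decodes the shell convention used in these log lines:
    // codes above 128 mean "terminated by signal (code - 128)".
    func describeExit(code int) string {
    	if code > 128 {
    		return fmt.Sprintf("killed by signal %d (%s)", code-128, syscall.Signal(code-128))
    	}
    	if code == 0 {
    		return "exited cleanly"
    	}
    	return fmt.Sprintf("exited with error status %d", code)
    }

    func main() {
    	for _, c := range []int{143, 0, 2} { // values seen in this log
    		fmt.Printf("exitCode=%d: %s\n", c, describeExit(c))
    	}
    }
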
pod="openstack/glance-default-external-api-0" event={"ID":"5110c446-0e66-4098-b30a-dfbdbc8e5fbe","Type":"ContainerStarted","Data":"5ce6551063dc670c51ac2cd165621dfe7bdfa46b77c29eb20ee5dce491f82a2b"} Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.269866 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.337953 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/785718c1-d706-4e71-8250-77f8326207d6-httpd-run\") pod \"785718c1-d706-4e71-8250-77f8326207d6\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.338006 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"785718c1-d706-4e71-8250-77f8326207d6\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.338038 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/785718c1-d706-4e71-8250-77f8326207d6-config-data\") pod \"785718c1-d706-4e71-8250-77f8326207d6\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.338084 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/785718c1-d706-4e71-8250-77f8326207d6-internal-tls-certs\") pod \"785718c1-d706-4e71-8250-77f8326207d6\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.338166 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/785718c1-d706-4e71-8250-77f8326207d6-combined-ca-bundle\") pod \"785718c1-d706-4e71-8250-77f8326207d6\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.338193 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjp6h\" (UniqueName: \"kubernetes.io/projected/785718c1-d706-4e71-8250-77f8326207d6-kube-api-access-hjp6h\") pod \"785718c1-d706-4e71-8250-77f8326207d6\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.338218 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/785718c1-d706-4e71-8250-77f8326207d6-logs\") pod \"785718c1-d706-4e71-8250-77f8326207d6\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.338254 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/785718c1-d706-4e71-8250-77f8326207d6-scripts\") pod \"785718c1-d706-4e71-8250-77f8326207d6\" (UID: \"785718c1-d706-4e71-8250-77f8326207d6\") " Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.344317 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/785718c1-d706-4e71-8250-77f8326207d6-kube-api-access-hjp6h" (OuterVolumeSpecName: "kube-api-access-hjp6h") pod "785718c1-d706-4e71-8250-77f8326207d6" (UID: "785718c1-d706-4e71-8250-77f8326207d6"). 
InnerVolumeSpecName "kube-api-access-hjp6h". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.356391 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/785718c1-d706-4e71-8250-77f8326207d6-logs" (OuterVolumeSpecName: "logs") pod "785718c1-d706-4e71-8250-77f8326207d6" (UID: "785718c1-d706-4e71-8250-77f8326207d6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.358059 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/785718c1-d706-4e71-8250-77f8326207d6-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "785718c1-d706-4e71-8250-77f8326207d6" (UID: "785718c1-d706-4e71-8250-77f8326207d6"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.364422 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/785718c1-d706-4e71-8250-77f8326207d6-scripts" (OuterVolumeSpecName: "scripts") pod "785718c1-d706-4e71-8250-77f8326207d6" (UID: "785718c1-d706-4e71-8250-77f8326207d6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.364457 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "785718c1-d706-4e71-8250-77f8326207d6" (UID: "785718c1-d706-4e71-8250-77f8326207d6"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.390262 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/785718c1-d706-4e71-8250-77f8326207d6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "785718c1-d706-4e71-8250-77f8326207d6" (UID: "785718c1-d706-4e71-8250-77f8326207d6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.402903 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/785718c1-d706-4e71-8250-77f8326207d6-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "785718c1-d706-4e71-8250-77f8326207d6" (UID: "785718c1-d706-4e71-8250-77f8326207d6"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.429916 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/785718c1-d706-4e71-8250-77f8326207d6-config-data" (OuterVolumeSpecName: "config-data") pod "785718c1-d706-4e71-8250-77f8326207d6" (UID: "785718c1-d706-4e71-8250-77f8326207d6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.439241 4838 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.439285 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/785718c1-d706-4e71-8250-77f8326207d6-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.439298 4838 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/785718c1-d706-4e71-8250-77f8326207d6-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.439310 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/785718c1-d706-4e71-8250-77f8326207d6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.439322 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjp6h\" (UniqueName: \"kubernetes.io/projected/785718c1-d706-4e71-8250-77f8326207d6-kube-api-access-hjp6h\") on node \"crc\" DevicePath \"\"" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.439333 4838 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/785718c1-d706-4e71-8250-77f8326207d6-logs\") on node \"crc\" DevicePath \"\"" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.439345 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/785718c1-d706-4e71-8250-77f8326207d6-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.439356 4838 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/785718c1-d706-4e71-8250-77f8326207d6-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.458280 4838 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.469787 4838 util.go:48] "No ready sandbox for pod can be found. 
Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.470012 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"785718c1-d706-4e71-8250-77f8326207d6","Type":"ContainerDied","Data":"a9f6b25aac1fa0b6a3b76db6ae57f4fd65bcb77fb7f59bd4334ebf84b155852c"}
Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.470067 4838 scope.go:117] "RemoveContainer" containerID="cb31ed3f580613dfeda4c2802d70297df8ef3ac8bcbeeb274398e529a1d999be"
Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.472792 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5110c446-0e66-4098-b30a-dfbdbc8e5fbe","Type":"ContainerStarted","Data":"bb72a44ba5b155598afb9c359f365c8de0d2d0d7e63f4bf82546e195f0b72d29"}
Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.500525 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.500505525 podStartE2EDuration="3.500505525s" podCreationTimestamp="2026-02-02 11:15:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:15:53.49803066 +0000 UTC m=+1347.835131688" watchObservedRunningTime="2026-02-02 11:15:53.500505525 +0000 UTC m=+1347.837606563"
Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.519347 4838 scope.go:117] "RemoveContainer" containerID="cd8ce6306c3129ac1384b3de5bee70d6589d86293795dd91eaabda36bf5587e2"
Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.532021 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.542041 4838 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\""
Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.554257 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.571727 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Feb 02 11:15:53 crc kubenswrapper[4838]: E0202 11:15:53.572174 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="785718c1-d706-4e71-8250-77f8326207d6" containerName="glance-httpd"
Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.572190 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="785718c1-d706-4e71-8250-77f8326207d6" containerName="glance-httpd"
Feb 02 11:15:53 crc kubenswrapper[4838]: E0202 11:15:53.572206 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="785718c1-d706-4e71-8250-77f8326207d6" containerName="glance-log"
Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.572213 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="785718c1-d706-4e71-8250-77f8326207d6" containerName="glance-log"
Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.572382 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="785718c1-d706-4e71-8250-77f8326207d6" containerName="glance-log"
Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.572407 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="785718c1-d706-4e71-8250-77f8326207d6" containerName="glance-httpd"
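
Note: unlike the ceilometer-0 record earlier, this startup-latency line for glance-default-external-api-0 has firstStartedPulling and lastFinishedPulling at the zero time ("0001-01-01 00:00:00 +0000 UTC"): no image pull happened (the image was already on the node), so podStartSLOduration equals podStartE2EDuration (about 3.5 s, 11:15:50 to 11:15:53.5). That zero timestamp is simply Go's time.Time zero value, which the tracker leaves untouched when nothing was pulled; e.g. (illustrative Go):

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	// An unset time.Time prints as "0001-01-01 00:00:00 +0000 UTC",
    	// matching the firstStartedPulling/lastFinishedPulling fields above.
    	var firstStartedPulling time.Time
    	fmt.Println(firstStartedPulling, firstStartedPulling.IsZero())
    }
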
11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.573431 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.575885 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.580884 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.581030 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.721868 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-bh9vr"] Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.724212 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-bh9vr" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.752060 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8824c6f8-8e4e-436f-a4c5-755ac38d0979-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8824c6f8-8e4e-436f-a4c5-755ac38d0979\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.752187 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gl6m\" (UniqueName: \"kubernetes.io/projected/8824c6f8-8e4e-436f-a4c5-755ac38d0979-kube-api-access-8gl6m\") pod \"glance-default-internal-api-0\" (UID: \"8824c6f8-8e4e-436f-a4c5-755ac38d0979\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.752264 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8824c6f8-8e4e-436f-a4c5-755ac38d0979-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8824c6f8-8e4e-436f-a4c5-755ac38d0979\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.752376 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8824c6f8-8e4e-436f-a4c5-755ac38d0979-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8824c6f8-8e4e-436f-a4c5-755ac38d0979\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.752431 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8824c6f8-8e4e-436f-a4c5-755ac38d0979-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8824c6f8-8e4e-436f-a4c5-755ac38d0979\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.752506 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8824c6f8-8e4e-436f-a4c5-755ac38d0979-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8824c6f8-8e4e-436f-a4c5-755ac38d0979\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.752576 4838 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8824c6f8-8e4e-436f-a4c5-755ac38d0979-logs\") pod \"glance-default-internal-api-0\" (UID: \"8824c6f8-8e4e-436f-a4c5-755ac38d0979\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.752656 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"8824c6f8-8e4e-436f-a4c5-755ac38d0979\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.769598 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-bh9vr"] Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.840443 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-mpdzc"] Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.841760 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-mpdzc" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.855050 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8824c6f8-8e4e-436f-a4c5-755ac38d0979-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8824c6f8-8e4e-436f-a4c5-755ac38d0979\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.855141 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8824c6f8-8e4e-436f-a4c5-755ac38d0979-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8824c6f8-8e4e-436f-a4c5-755ac38d0979\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.855184 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbrf4\" (UniqueName: \"kubernetes.io/projected/052c9a16-08e7-4892-8bcd-dc3055e14ca8-kube-api-access-qbrf4\") pod \"nova-api-db-create-bh9vr\" (UID: \"052c9a16-08e7-4892-8bcd-dc3055e14ca8\") " pod="openstack/nova-api-db-create-bh9vr" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.855207 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8824c6f8-8e4e-436f-a4c5-755ac38d0979-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8824c6f8-8e4e-436f-a4c5-755ac38d0979\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.855244 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8824c6f8-8e4e-436f-a4c5-755ac38d0979-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8824c6f8-8e4e-436f-a4c5-755ac38d0979\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.855278 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/052c9a16-08e7-4892-8bcd-dc3055e14ca8-operator-scripts\") pod \"nova-api-db-create-bh9vr\" (UID: \"052c9a16-08e7-4892-8bcd-dc3055e14ca8\") " 
pod="openstack/nova-api-db-create-bh9vr" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.855307 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8824c6f8-8e4e-436f-a4c5-755ac38d0979-logs\") pod \"glance-default-internal-api-0\" (UID: \"8824c6f8-8e4e-436f-a4c5-755ac38d0979\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.855519 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"8824c6f8-8e4e-436f-a4c5-755ac38d0979\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.855583 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8824c6f8-8e4e-436f-a4c5-755ac38d0979-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8824c6f8-8e4e-436f-a4c5-755ac38d0979\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.855641 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gl6m\" (UniqueName: \"kubernetes.io/projected/8824c6f8-8e4e-436f-a4c5-755ac38d0979-kube-api-access-8gl6m\") pod \"glance-default-internal-api-0\" (UID: \"8824c6f8-8e4e-436f-a4c5-755ac38d0979\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.860657 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-mpdzc"] Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.861129 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8824c6f8-8e4e-436f-a4c5-755ac38d0979-logs\") pod \"glance-default-internal-api-0\" (UID: \"8824c6f8-8e4e-436f-a4c5-755ac38d0979\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.861397 4838 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"8824c6f8-8e4e-436f-a4c5-755ac38d0979\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.864099 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8824c6f8-8e4e-436f-a4c5-755ac38d0979-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8824c6f8-8e4e-436f-a4c5-755ac38d0979\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.868494 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8824c6f8-8e4e-436f-a4c5-755ac38d0979-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8824c6f8-8e4e-436f-a4c5-755ac38d0979\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.877411 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8824c6f8-8e4e-436f-a4c5-755ac38d0979-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: 
\"8824c6f8-8e4e-436f-a4c5-755ac38d0979\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.879080 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8824c6f8-8e4e-436f-a4c5-755ac38d0979-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8824c6f8-8e4e-436f-a4c5-755ac38d0979\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.879111 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gl6m\" (UniqueName: \"kubernetes.io/projected/8824c6f8-8e4e-436f-a4c5-755ac38d0979-kube-api-access-8gl6m\") pod \"glance-default-internal-api-0\" (UID: \"8824c6f8-8e4e-436f-a4c5-755ac38d0979\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.879163 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8824c6f8-8e4e-436f-a4c5-755ac38d0979-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8824c6f8-8e4e-436f-a4c5-755ac38d0979\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.895245 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"8824c6f8-8e4e-436f-a4c5-755ac38d0979\") " pod="openstack/glance-default-internal-api-0" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.929409 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-e31d-account-create-update-xckgb"] Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.930607 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-e31d-account-create-update-xckgb" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.932801 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.939575 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-e31d-account-create-update-xckgb"] Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.956874 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d369bf9a-8e44-4186-8513-9f73bd321e6e-operator-scripts\") pod \"nova-cell0-db-create-mpdzc\" (UID: \"d369bf9a-8e44-4186-8513-9f73bd321e6e\") " pod="openstack/nova-cell0-db-create-mpdzc" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.956928 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbrf4\" (UniqueName: \"kubernetes.io/projected/052c9a16-08e7-4892-8bcd-dc3055e14ca8-kube-api-access-qbrf4\") pod \"nova-api-db-create-bh9vr\" (UID: \"052c9a16-08e7-4892-8bcd-dc3055e14ca8\") " pod="openstack/nova-api-db-create-bh9vr" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.956965 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blv4n\" (UniqueName: \"kubernetes.io/projected/d369bf9a-8e44-4186-8513-9f73bd321e6e-kube-api-access-blv4n\") pod \"nova-cell0-db-create-mpdzc\" (UID: \"d369bf9a-8e44-4186-8513-9f73bd321e6e\") " pod="openstack/nova-cell0-db-create-mpdzc" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.957021 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/052c9a16-08e7-4892-8bcd-dc3055e14ca8-operator-scripts\") pod \"nova-api-db-create-bh9vr\" (UID: \"052c9a16-08e7-4892-8bcd-dc3055e14ca8\") " pod="openstack/nova-api-db-create-bh9vr" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.958237 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/052c9a16-08e7-4892-8bcd-dc3055e14ca8-operator-scripts\") pod \"nova-api-db-create-bh9vr\" (UID: \"052c9a16-08e7-4892-8bcd-dc3055e14ca8\") " pod="openstack/nova-api-db-create-bh9vr" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.960747 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-bx8gk"] Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.962782 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-bx8gk" Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.970231 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-bx8gk"] Feb 02 11:15:53 crc kubenswrapper[4838]: I0202 11:15:53.982052 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbrf4\" (UniqueName: \"kubernetes.io/projected/052c9a16-08e7-4892-8bcd-dc3055e14ca8-kube-api-access-qbrf4\") pod \"nova-api-db-create-bh9vr\" (UID: \"052c9a16-08e7-4892-8bcd-dc3055e14ca8\") " pod="openstack/nova-api-db-create-bh9vr" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.058728 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6c77290d-3574-4eb4-ab26-abf2f666367b-operator-scripts\") pod \"nova-cell1-db-create-bx8gk\" (UID: \"6c77290d-3574-4eb4-ab26-abf2f666367b\") " pod="openstack/nova-cell1-db-create-bx8gk" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.059083 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55-operator-scripts\") pod \"nova-api-e31d-account-create-update-xckgb\" (UID: \"04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55\") " pod="openstack/nova-api-e31d-account-create-update-xckgb" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.059182 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vb7cc\" (UniqueName: \"kubernetes.io/projected/6c77290d-3574-4eb4-ab26-abf2f666367b-kube-api-access-vb7cc\") pod \"nova-cell1-db-create-bx8gk\" (UID: \"6c77290d-3574-4eb4-ab26-abf2f666367b\") " pod="openstack/nova-cell1-db-create-bx8gk" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.059263 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d369bf9a-8e44-4186-8513-9f73bd321e6e-operator-scripts\") pod \"nova-cell0-db-create-mpdzc\" (UID: \"d369bf9a-8e44-4186-8513-9f73bd321e6e\") " pod="openstack/nova-cell0-db-create-mpdzc" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.059399 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blv4n\" (UniqueName: \"kubernetes.io/projected/d369bf9a-8e44-4186-8513-9f73bd321e6e-kube-api-access-blv4n\") pod \"nova-cell0-db-create-mpdzc\" (UID: \"d369bf9a-8e44-4186-8513-9f73bd321e6e\") " pod="openstack/nova-cell0-db-create-mpdzc" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.059883 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drbhc\" (UniqueName: \"kubernetes.io/projected/04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55-kube-api-access-drbhc\") pod \"nova-api-e31d-account-create-update-xckgb\" (UID: \"04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55\") " pod="openstack/nova-api-e31d-account-create-update-xckgb" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.060134 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d369bf9a-8e44-4186-8513-9f73bd321e6e-operator-scripts\") pod \"nova-cell0-db-create-mpdzc\" (UID: \"d369bf9a-8e44-4186-8513-9f73bd321e6e\") " pod="openstack/nova-cell0-db-create-mpdzc" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 
11:15:54.075450 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-bh9vr" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.076764 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-blv4n\" (UniqueName: \"kubernetes.io/projected/d369bf9a-8e44-4186-8513-9f73bd321e6e-kube-api-access-blv4n\") pod \"nova-cell0-db-create-mpdzc\" (UID: \"d369bf9a-8e44-4186-8513-9f73bd321e6e\") " pod="openstack/nova-cell0-db-create-mpdzc" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.141703 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-5f6e-account-create-update-jp75l"] Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.143119 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-5f6e-account-create-update-jp75l" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.146110 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.161195 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drbhc\" (UniqueName: \"kubernetes.io/projected/04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55-kube-api-access-drbhc\") pod \"nova-api-e31d-account-create-update-xckgb\" (UID: \"04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55\") " pod="openstack/nova-api-e31d-account-create-update-xckgb" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.161267 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6c77290d-3574-4eb4-ab26-abf2f666367b-operator-scripts\") pod \"nova-cell1-db-create-bx8gk\" (UID: \"6c77290d-3574-4eb4-ab26-abf2f666367b\") " pod="openstack/nova-cell1-db-create-bx8gk" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.161364 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55-operator-scripts\") pod \"nova-api-e31d-account-create-update-xckgb\" (UID: \"04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55\") " pod="openstack/nova-api-e31d-account-create-update-xckgb" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.161395 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vb7cc\" (UniqueName: \"kubernetes.io/projected/6c77290d-3574-4eb4-ab26-abf2f666367b-kube-api-access-vb7cc\") pod \"nova-cell1-db-create-bx8gk\" (UID: \"6c77290d-3574-4eb4-ab26-abf2f666367b\") " pod="openstack/nova-cell1-db-create-bx8gk" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.162191 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6c77290d-3574-4eb4-ab26-abf2f666367b-operator-scripts\") pod \"nova-cell1-db-create-bx8gk\" (UID: \"6c77290d-3574-4eb4-ab26-abf2f666367b\") " pod="openstack/nova-cell1-db-create-bx8gk" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.162442 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55-operator-scripts\") pod \"nova-api-e31d-account-create-update-xckgb\" (UID: \"04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55\") " pod="openstack/nova-api-e31d-account-create-update-xckgb" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.166231 
4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-mpdzc" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.181541 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-5f6e-account-create-update-jp75l"] Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.190166 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drbhc\" (UniqueName: \"kubernetes.io/projected/04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55-kube-api-access-drbhc\") pod \"nova-api-e31d-account-create-update-xckgb\" (UID: \"04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55\") " pod="openstack/nova-api-e31d-account-create-update-xckgb" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.191078 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.192092 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vb7cc\" (UniqueName: \"kubernetes.io/projected/6c77290d-3574-4eb4-ab26-abf2f666367b-kube-api-access-vb7cc\") pod \"nova-cell1-db-create-bx8gk\" (UID: \"6c77290d-3574-4eb4-ab26-abf2f666367b\") " pod="openstack/nova-cell1-db-create-bx8gk" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.250902 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-e31d-account-create-update-xckgb" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.263160 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2k8x\" (UniqueName: \"kubernetes.io/projected/c77c3373-fdbf-4a38-8ab0-ce701577f29f-kube-api-access-f2k8x\") pod \"nova-cell0-5f6e-account-create-update-jp75l\" (UID: \"c77c3373-fdbf-4a38-8ab0-ce701577f29f\") " pod="openstack/nova-cell0-5f6e-account-create-update-jp75l" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.263193 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c77c3373-fdbf-4a38-8ab0-ce701577f29f-operator-scripts\") pod \"nova-cell0-5f6e-account-create-update-jp75l\" (UID: \"c77c3373-fdbf-4a38-8ab0-ce701577f29f\") " pod="openstack/nova-cell0-5f6e-account-create-update-jp75l" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.282054 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-bx8gk" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.332422 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-2b0b-account-create-update-kk8h4"] Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.333417 4838 util.go:30] "No sandbox for pod can be found. 
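
Two similar but distinct sandbox messages appear in this stretch: util.go:30 "No sandbox for pod can be found" (a brand-new pod needs its first sandbox) and util.go:48 "No ready sandbox for pod can be found" (the pod's existing sandbox is present but no longer ready, so it is replaced). A sketch that classifies them per pod; the file and label names are mine, and it assumes each message sits on one journal line as in the raw log.

// sandbox.go - sketch separating the two sandbox messages: util.go:30
// (no sandbox at all: a fresh pod) vs util.go:48 (sandbox exists but is
// not ready and must be recreated).
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

func main() {
	re := regexp.MustCompile(`"No (ready )?sandbox for pod can be found. Need to start a new one" pod="([^"]+)"`)
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1<<20), 1<<20)
	for sc.Scan() {
		if m := re.FindStringSubmatch(sc.Text()); m != nil {
			kind := "new pod (no sandbox)"
			if m[1] != "" {
				kind = "recreate (sandbox not ready)"
			}
			fmt.Printf("%-50s %s\n", m[2], kind)
		}
	}
}
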
Need to start a new one" pod="openstack/nova-cell1-2b0b-account-create-update-kk8h4" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.336987 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.353800 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-2b0b-account-create-update-kk8h4"] Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.367445 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2k8x\" (UniqueName: \"kubernetes.io/projected/c77c3373-fdbf-4a38-8ab0-ce701577f29f-kube-api-access-f2k8x\") pod \"nova-cell0-5f6e-account-create-update-jp75l\" (UID: \"c77c3373-fdbf-4a38-8ab0-ce701577f29f\") " pod="openstack/nova-cell0-5f6e-account-create-update-jp75l" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.367492 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c77c3373-fdbf-4a38-8ab0-ce701577f29f-operator-scripts\") pod \"nova-cell0-5f6e-account-create-update-jp75l\" (UID: \"c77c3373-fdbf-4a38-8ab0-ce701577f29f\") " pod="openstack/nova-cell0-5f6e-account-create-update-jp75l" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.370342 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c77c3373-fdbf-4a38-8ab0-ce701577f29f-operator-scripts\") pod \"nova-cell0-5f6e-account-create-update-jp75l\" (UID: \"c77c3373-fdbf-4a38-8ab0-ce701577f29f\") " pod="openstack/nova-cell0-5f6e-account-create-update-jp75l" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.385931 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2k8x\" (UniqueName: \"kubernetes.io/projected/c77c3373-fdbf-4a38-8ab0-ce701577f29f-kube-api-access-f2k8x\") pod \"nova-cell0-5f6e-account-create-update-jp75l\" (UID: \"c77c3373-fdbf-4a38-8ab0-ce701577f29f\") " pod="openstack/nova-cell0-5f6e-account-create-update-jp75l" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.461814 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-5f6e-account-create-update-jp75l" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.468931 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmxzx\" (UniqueName: \"kubernetes.io/projected/8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3-kube-api-access-dmxzx\") pod \"nova-cell1-2b0b-account-create-update-kk8h4\" (UID: \"8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3\") " pod="openstack/nova-cell1-2b0b-account-create-update-kk8h4" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.469070 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3-operator-scripts\") pod \"nova-cell1-2b0b-account-create-update-kk8h4\" (UID: \"8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3\") " pod="openstack/nova-cell1-2b0b-account-create-update-kk8h4" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.519162 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="785718c1-d706-4e71-8250-77f8326207d6" path="/var/lib/kubelet/pods/785718c1-d706-4e71-8250-77f8326207d6/volumes" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.570362 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmxzx\" (UniqueName: \"kubernetes.io/projected/8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3-kube-api-access-dmxzx\") pod \"nova-cell1-2b0b-account-create-update-kk8h4\" (UID: \"8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3\") " pod="openstack/nova-cell1-2b0b-account-create-update-kk8h4" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.570432 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3-operator-scripts\") pod \"nova-cell1-2b0b-account-create-update-kk8h4\" (UID: \"8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3\") " pod="openstack/nova-cell1-2b0b-account-create-update-kk8h4" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.571110 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3-operator-scripts\") pod \"nova-cell1-2b0b-account-create-update-kk8h4\" (UID: \"8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3\") " pod="openstack/nova-cell1-2b0b-account-create-update-kk8h4" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.600187 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmxzx\" (UniqueName: \"kubernetes.io/projected/8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3-kube-api-access-dmxzx\") pod \"nova-cell1-2b0b-account-create-update-kk8h4\" (UID: \"8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3\") " pod="openstack/nova-cell1-2b0b-account-create-update-kk8h4" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.638702 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-bh9vr"] Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.750951 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-2b0b-account-create-update-kk8h4" Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.758089 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-mpdzc"] Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.851229 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-e31d-account-create-update-xckgb"] Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.859609 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-bx8gk"] Feb 02 11:15:54 crc kubenswrapper[4838]: I0202 11:15:54.935461 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 11:15:54 crc kubenswrapper[4838]: W0202 11:15:54.944933 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8824c6f8_8e4e_436f_a4c5_755ac38d0979.slice/crio-c6eb550a26888ca8526b313f015fa7081d407bd4ceb748299a9b4e04a3b9de26 WatchSource:0}: Error finding container c6eb550a26888ca8526b313f015fa7081d407bd4ceb748299a9b4e04a3b9de26: Status 404 returned error can't find the container with id c6eb550a26888ca8526b313f015fa7081d407bd4ceb748299a9b4e04a3b9de26 Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.035795 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-5f6e-account-create-update-jp75l"] Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.252471 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-2b0b-account-create-update-kk8h4"] Feb 02 11:15:55 crc kubenswrapper[4838]: W0202 11:15:55.305768 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8b1e17d7_7746_46df_bcfd_2ca72c0cd2b3.slice/crio-d9ad92c78d8ab6e65a8cce1b16f176d5971b28b0853c551da8af62a0e29e8499 WatchSource:0}: Error finding container d9ad92c78d8ab6e65a8cce1b16f176d5971b28b0853c551da8af62a0e29e8499: Status 404 returned error can't find the container with id d9ad92c78d8ab6e65a8cce1b16f176d5971b28b0853c551da8af62a0e29e8499 Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.461330 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.534904 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-bh9vr" event={"ID":"052c9a16-08e7-4892-8bcd-dc3055e14ca8","Type":"ContainerStarted","Data":"a4509ee30dd40d9774a51e328127f4308faaa33f39254883fe22b5aed9fbc5da"} Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.534939 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-bh9vr" event={"ID":"052c9a16-08e7-4892-8bcd-dc3055e14ca8","Type":"ContainerStarted","Data":"ea72f8d29382767d6ff11b188a458dde5d64f8f84405b1c765d3f5e7085698e5"} Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.544788 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-2b0b-account-create-update-kk8h4" event={"ID":"8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3","Type":"ContainerStarted","Data":"d9ad92c78d8ab6e65a8cce1b16f176d5971b28b0853c551da8af62a0e29e8499"} Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.548091 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-5f6e-account-create-update-jp75l" event={"ID":"c77c3373-fdbf-4a38-8ab0-ce701577f29f","Type":"ContainerStarted","Data":"6df96c07fadf519e4d11b6b4682d888cf0dfcc578d458c63cce5eb1fc22ba175"} Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.549914 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-bx8gk" event={"ID":"6c77290d-3574-4eb4-ab26-abf2f666367b","Type":"ContainerStarted","Data":"4d28615ece9b3ff7f5dc10cda05fa7841f11621b3dc1bb2c6b7d8dc77ba77af8"} Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.553716 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8824c6f8-8e4e-436f-a4c5-755ac38d0979","Type":"ContainerStarted","Data":"c6eb550a26888ca8526b313f015fa7081d407bd4ceb748299a9b4e04a3b9de26"} Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.558834 4838 generic.go:334] "Generic (PLEG): container finished" podID="6b95b77a-01f4-4847-a86b-d5c8564df612" containerID="2eaa60014fb589380fcb5b96a3cc14d9d1db9cd572787f7ffdba25d16bb92eb7" exitCode=0 Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.558897 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b95b77a-01f4-4847-a86b-d5c8564df612","Type":"ContainerDied","Data":"2eaa60014fb589380fcb5b96a3cc14d9d1db9cd572787f7ffdba25d16bb92eb7"} Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.558923 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b95b77a-01f4-4847-a86b-d5c8564df612","Type":"ContainerDied","Data":"fdfe40872090be9ef50986ceee44cfea332e63a7f0a97392db5699f067442a1c"} Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.558946 4838 scope.go:117] "RemoveContainer" containerID="85e8d418e9aca743b463c26d63f445c6079b099d946e2eb61f58250945e69897" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.559083 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.559751 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-bh9vr" podStartSLOduration=2.559736076 podStartE2EDuration="2.559736076s" podCreationTimestamp="2026-02-02 11:15:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:15:55.555940616 +0000 UTC m=+1349.893041664" watchObservedRunningTime="2026-02-02 11:15:55.559736076 +0000 UTC m=+1349.896837114" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.562151 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-mpdzc" event={"ID":"d369bf9a-8e44-4186-8513-9f73bd321e6e","Type":"ContainerStarted","Data":"0523e6f9371b83d8fa564b2fcaf1c2e3875f816ee2dcae8ee62a23c3d4353542"} Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.563293 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-e31d-account-create-update-xckgb" event={"ID":"04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55","Type":"ContainerStarted","Data":"88d102ddb480b4b9cb317a1b716def63232f747be3e6e71fa74408d42a02f813"} Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.593381 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b95b77a-01f4-4847-a86b-d5c8564df612-sg-core-conf-yaml\") pod \"6b95b77a-01f4-4847-a86b-d5c8564df612\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.593603 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4pknc\" (UniqueName: \"kubernetes.io/projected/6b95b77a-01f4-4847-a86b-d5c8564df612-kube-api-access-4pknc\") pod \"6b95b77a-01f4-4847-a86b-d5c8564df612\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.593684 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b95b77a-01f4-4847-a86b-d5c8564df612-log-httpd\") pod \"6b95b77a-01f4-4847-a86b-d5c8564df612\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.593803 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b95b77a-01f4-4847-a86b-d5c8564df612-combined-ca-bundle\") pod \"6b95b77a-01f4-4847-a86b-d5c8564df612\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.593848 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b95b77a-01f4-4847-a86b-d5c8564df612-run-httpd\") pod \"6b95b77a-01f4-4847-a86b-d5c8564df612\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.593902 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b95b77a-01f4-4847-a86b-d5c8564df612-scripts\") pod \"6b95b77a-01f4-4847-a86b-d5c8564df612\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.593988 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/6b95b77a-01f4-4847-a86b-d5c8564df612-config-data\") pod \"6b95b77a-01f4-4847-a86b-d5c8564df612\" (UID: \"6b95b77a-01f4-4847-a86b-d5c8564df612\") " Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.594830 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b95b77a-01f4-4847-a86b-d5c8564df612-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "6b95b77a-01f4-4847-a86b-d5c8564df612" (UID: "6b95b77a-01f4-4847-a86b-d5c8564df612"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.595513 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b95b77a-01f4-4847-a86b-d5c8564df612-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "6b95b77a-01f4-4847-a86b-d5c8564df612" (UID: "6b95b77a-01f4-4847-a86b-d5c8564df612"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.602801 4838 scope.go:117] "RemoveContainer" containerID="d14a4e3745ae58787229a175b1945feb68d46aa6aed7089bf2d9a2a18e6dd383" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.603855 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b95b77a-01f4-4847-a86b-d5c8564df612-kube-api-access-4pknc" (OuterVolumeSpecName: "kube-api-access-4pknc") pod "6b95b77a-01f4-4847-a86b-d5c8564df612" (UID: "6b95b77a-01f4-4847-a86b-d5c8564df612"). InnerVolumeSpecName "kube-api-access-4pknc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.609063 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b95b77a-01f4-4847-a86b-d5c8564df612-scripts" (OuterVolumeSpecName: "scripts") pod "6b95b77a-01f4-4847-a86b-d5c8564df612" (UID: "6b95b77a-01f4-4847-a86b-d5c8564df612"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.652785 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b95b77a-01f4-4847-a86b-d5c8564df612-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "6b95b77a-01f4-4847-a86b-d5c8564df612" (UID: "6b95b77a-01f4-4847-a86b-d5c8564df612"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.689823 4838 scope.go:117] "RemoveContainer" containerID="2eaa60014fb589380fcb5b96a3cc14d9d1db9cd572787f7ffdba25d16bb92eb7" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.689985 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b95b77a-01f4-4847-a86b-d5c8564df612-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6b95b77a-01f4-4847-a86b-d5c8564df612" (UID: "6b95b77a-01f4-4847-a86b-d5c8564df612"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.697422 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4pknc\" (UniqueName: \"kubernetes.io/projected/6b95b77a-01f4-4847-a86b-d5c8564df612-kube-api-access-4pknc\") on node \"crc\" DevicePath \"\"" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.697471 4838 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b95b77a-01f4-4847-a86b-d5c8564df612-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.697483 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b95b77a-01f4-4847-a86b-d5c8564df612-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.697492 4838 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b95b77a-01f4-4847-a86b-d5c8564df612-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.697500 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b95b77a-01f4-4847-a86b-d5c8564df612-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.697508 4838 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b95b77a-01f4-4847-a86b-d5c8564df612-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.711509 4838 scope.go:117] "RemoveContainer" containerID="e9ec1a2d93a4032109413aac85c15fa3595dd572c03937d48199246ac7740224" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.715901 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b95b77a-01f4-4847-a86b-d5c8564df612-config-data" (OuterVolumeSpecName: "config-data") pod "6b95b77a-01f4-4847-a86b-d5c8564df612" (UID: "6b95b77a-01f4-4847-a86b-d5c8564df612"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.739745 4838 scope.go:117] "RemoveContainer" containerID="85e8d418e9aca743b463c26d63f445c6079b099d946e2eb61f58250945e69897" Feb 02 11:15:55 crc kubenswrapper[4838]: E0202 11:15:55.740239 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85e8d418e9aca743b463c26d63f445c6079b099d946e2eb61f58250945e69897\": container with ID starting with 85e8d418e9aca743b463c26d63f445c6079b099d946e2eb61f58250945e69897 not found: ID does not exist" containerID="85e8d418e9aca743b463c26d63f445c6079b099d946e2eb61f58250945e69897" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.740282 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85e8d418e9aca743b463c26d63f445c6079b099d946e2eb61f58250945e69897"} err="failed to get container status \"85e8d418e9aca743b463c26d63f445c6079b099d946e2eb61f58250945e69897\": rpc error: code = NotFound desc = could not find container \"85e8d418e9aca743b463c26d63f445c6079b099d946e2eb61f58250945e69897\": container with ID starting with 85e8d418e9aca743b463c26d63f445c6079b099d946e2eb61f58250945e69897 not found: ID does not exist" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.740310 4838 scope.go:117] "RemoveContainer" containerID="d14a4e3745ae58787229a175b1945feb68d46aa6aed7089bf2d9a2a18e6dd383" Feb 02 11:15:55 crc kubenswrapper[4838]: E0202 11:15:55.740646 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d14a4e3745ae58787229a175b1945feb68d46aa6aed7089bf2d9a2a18e6dd383\": container with ID starting with d14a4e3745ae58787229a175b1945feb68d46aa6aed7089bf2d9a2a18e6dd383 not found: ID does not exist" containerID="d14a4e3745ae58787229a175b1945feb68d46aa6aed7089bf2d9a2a18e6dd383" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.740681 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d14a4e3745ae58787229a175b1945feb68d46aa6aed7089bf2d9a2a18e6dd383"} err="failed to get container status \"d14a4e3745ae58787229a175b1945feb68d46aa6aed7089bf2d9a2a18e6dd383\": rpc error: code = NotFound desc = could not find container \"d14a4e3745ae58787229a175b1945feb68d46aa6aed7089bf2d9a2a18e6dd383\": container with ID starting with d14a4e3745ae58787229a175b1945feb68d46aa6aed7089bf2d9a2a18e6dd383 not found: ID does not exist" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.740699 4838 scope.go:117] "RemoveContainer" containerID="2eaa60014fb589380fcb5b96a3cc14d9d1db9cd572787f7ffdba25d16bb92eb7" Feb 02 11:15:55 crc kubenswrapper[4838]: E0202 11:15:55.740907 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2eaa60014fb589380fcb5b96a3cc14d9d1db9cd572787f7ffdba25d16bb92eb7\": container with ID starting with 2eaa60014fb589380fcb5b96a3cc14d9d1db9cd572787f7ffdba25d16bb92eb7 not found: ID does not exist" containerID="2eaa60014fb589380fcb5b96a3cc14d9d1db9cd572787f7ffdba25d16bb92eb7" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.740929 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2eaa60014fb589380fcb5b96a3cc14d9d1db9cd572787f7ffdba25d16bb92eb7"} err="failed to get container status \"2eaa60014fb589380fcb5b96a3cc14d9d1db9cd572787f7ffdba25d16bb92eb7\": rpc error: code = NotFound desc = could not 
find container \"2eaa60014fb589380fcb5b96a3cc14d9d1db9cd572787f7ffdba25d16bb92eb7\": container with ID starting with 2eaa60014fb589380fcb5b96a3cc14d9d1db9cd572787f7ffdba25d16bb92eb7 not found: ID does not exist" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.740941 4838 scope.go:117] "RemoveContainer" containerID="e9ec1a2d93a4032109413aac85c15fa3595dd572c03937d48199246ac7740224" Feb 02 11:15:55 crc kubenswrapper[4838]: E0202 11:15:55.741128 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9ec1a2d93a4032109413aac85c15fa3595dd572c03937d48199246ac7740224\": container with ID starting with e9ec1a2d93a4032109413aac85c15fa3595dd572c03937d48199246ac7740224 not found: ID does not exist" containerID="e9ec1a2d93a4032109413aac85c15fa3595dd572c03937d48199246ac7740224" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.741149 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9ec1a2d93a4032109413aac85c15fa3595dd572c03937d48199246ac7740224"} err="failed to get container status \"e9ec1a2d93a4032109413aac85c15fa3595dd572c03937d48199246ac7740224\": rpc error: code = NotFound desc = could not find container \"e9ec1a2d93a4032109413aac85c15fa3595dd572c03937d48199246ac7740224\": container with ID starting with e9ec1a2d93a4032109413aac85c15fa3595dd572c03937d48199246ac7740224 not found: ID does not exist" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.798921 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b95b77a-01f4-4847-a86b-d5c8564df612-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.891642 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.900420 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.921919 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:15:55 crc kubenswrapper[4838]: E0202 11:15:55.922314 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b95b77a-01f4-4847-a86b-d5c8564df612" containerName="ceilometer-notification-agent" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.922340 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b95b77a-01f4-4847-a86b-d5c8564df612" containerName="ceilometer-notification-agent" Feb 02 11:15:55 crc kubenswrapper[4838]: E0202 11:15:55.922361 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b95b77a-01f4-4847-a86b-d5c8564df612" containerName="sg-core" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.922369 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b95b77a-01f4-4847-a86b-d5c8564df612" containerName="sg-core" Feb 02 11:15:55 crc kubenswrapper[4838]: E0202 11:15:55.922391 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b95b77a-01f4-4847-a86b-d5c8564df612" containerName="proxy-httpd" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.922398 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b95b77a-01f4-4847-a86b-d5c8564df612" containerName="proxy-httpd" Feb 02 11:15:55 crc kubenswrapper[4838]: E0202 11:15:55.922415 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b95b77a-01f4-4847-a86b-d5c8564df612" 
containerName="ceilometer-central-agent" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.922424 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b95b77a-01f4-4847-a86b-d5c8564df612" containerName="ceilometer-central-agent" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.922684 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b95b77a-01f4-4847-a86b-d5c8564df612" containerName="ceilometer-notification-agent" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.922710 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b95b77a-01f4-4847-a86b-d5c8564df612" containerName="sg-core" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.922724 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b95b77a-01f4-4847-a86b-d5c8564df612" containerName="ceilometer-central-agent" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.922737 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b95b77a-01f4-4847-a86b-d5c8564df612" containerName="proxy-httpd" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.924661 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.927150 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.927225 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 11:15:55 crc kubenswrapper[4838]: I0202 11:15:55.936170 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.002255 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0331ec29-4194-41b3-a014-bdf3f869aa17-scripts\") pod \"ceilometer-0\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " pod="openstack/ceilometer-0" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.002315 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0331ec29-4194-41b3-a014-bdf3f869aa17-log-httpd\") pod \"ceilometer-0\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " pod="openstack/ceilometer-0" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.002362 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0331ec29-4194-41b3-a014-bdf3f869aa17-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " pod="openstack/ceilometer-0" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.002438 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0331ec29-4194-41b3-a014-bdf3f869aa17-run-httpd\") pod \"ceilometer-0\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " pod="openstack/ceilometer-0" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.002475 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0331ec29-4194-41b3-a014-bdf3f869aa17-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " 
pod="openstack/ceilometer-0" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.002496 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0331ec29-4194-41b3-a014-bdf3f869aa17-config-data\") pod \"ceilometer-0\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " pod="openstack/ceilometer-0" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.002519 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2gd6\" (UniqueName: \"kubernetes.io/projected/0331ec29-4194-41b3-a014-bdf3f869aa17-kube-api-access-h2gd6\") pod \"ceilometer-0\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " pod="openstack/ceilometer-0" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.104790 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0331ec29-4194-41b3-a014-bdf3f869aa17-scripts\") pod \"ceilometer-0\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " pod="openstack/ceilometer-0" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.104879 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0331ec29-4194-41b3-a014-bdf3f869aa17-log-httpd\") pod \"ceilometer-0\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " pod="openstack/ceilometer-0" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.104929 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0331ec29-4194-41b3-a014-bdf3f869aa17-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " pod="openstack/ceilometer-0" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.104962 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0331ec29-4194-41b3-a014-bdf3f869aa17-run-httpd\") pod \"ceilometer-0\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " pod="openstack/ceilometer-0" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.104990 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0331ec29-4194-41b3-a014-bdf3f869aa17-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " pod="openstack/ceilometer-0" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.105396 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0331ec29-4194-41b3-a014-bdf3f869aa17-log-httpd\") pod \"ceilometer-0\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " pod="openstack/ceilometer-0" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.105602 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0331ec29-4194-41b3-a014-bdf3f869aa17-run-httpd\") pod \"ceilometer-0\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " pod="openstack/ceilometer-0" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.106270 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0331ec29-4194-41b3-a014-bdf3f869aa17-config-data\") pod \"ceilometer-0\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " 
pod="openstack/ceilometer-0" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.106313 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2gd6\" (UniqueName: \"kubernetes.io/projected/0331ec29-4194-41b3-a014-bdf3f869aa17-kube-api-access-h2gd6\") pod \"ceilometer-0\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " pod="openstack/ceilometer-0" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.109170 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0331ec29-4194-41b3-a014-bdf3f869aa17-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " pod="openstack/ceilometer-0" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.109304 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0331ec29-4194-41b3-a014-bdf3f869aa17-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " pod="openstack/ceilometer-0" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.109344 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0331ec29-4194-41b3-a014-bdf3f869aa17-scripts\") pod \"ceilometer-0\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " pod="openstack/ceilometer-0" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.110418 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0331ec29-4194-41b3-a014-bdf3f869aa17-config-data\") pod \"ceilometer-0\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " pod="openstack/ceilometer-0" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.124288 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2gd6\" (UniqueName: \"kubernetes.io/projected/0331ec29-4194-41b3-a014-bdf3f869aa17-kube-api-access-h2gd6\") pod \"ceilometer-0\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " pod="openstack/ceilometer-0" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.246014 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.528062 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b95b77a-01f4-4847-a86b-d5c8564df612" path="/var/lib/kubelet/pods/6b95b77a-01f4-4847-a86b-d5c8564df612/volumes" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.595095 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-2b0b-account-create-update-kk8h4" event={"ID":"8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3","Type":"ContainerStarted","Data":"30ba2e023fa61f40d57ac45157d0816e07d0ffa2117bfb9862c75fcc972e1bcb"} Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.597933 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-5f6e-account-create-update-jp75l" event={"ID":"c77c3373-fdbf-4a38-8ab0-ce701577f29f","Type":"ContainerStarted","Data":"093cc634de90b5c2024fdd65ae71c20107d325c1d7fb2acfa1d56c855a7890cb"} Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.600902 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-bx8gk" event={"ID":"6c77290d-3574-4eb4-ab26-abf2f666367b","Type":"ContainerStarted","Data":"d8d0f7451dcbb418f26043ca0932b0d2cea018fd39287787d4f3fdad971ceee4"} Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.606136 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8824c6f8-8e4e-436f-a4c5-755ac38d0979","Type":"ContainerStarted","Data":"a75d09b9f3599cf9d9803a828638e470c8b3ab92eef23769543f60272234db33"} Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.610282 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-mpdzc" event={"ID":"d369bf9a-8e44-4186-8513-9f73bd321e6e","Type":"ContainerStarted","Data":"858527d4cfb040fb4f32e431c29bb41f26498554fd87d58c448f438b4a7a8c7f"} Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.615022 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-e31d-account-create-update-xckgb" event={"ID":"04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55","Type":"ContainerStarted","Data":"0b5d55f1358badab9b4cea5a00f3aaa32f6b79b2e6923f7aa38805adfd89f216"} Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.655800 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-2b0b-account-create-update-kk8h4" podStartSLOduration=2.65577903 podStartE2EDuration="2.65577903s" podCreationTimestamp="2026-02-02 11:15:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:15:56.647681877 +0000 UTC m=+1350.984782905" watchObservedRunningTime="2026-02-02 11:15:56.65577903 +0000 UTC m=+1350.992880068" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.665923 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-e31d-account-create-update-xckgb" podStartSLOduration=3.665901797 podStartE2EDuration="3.665901797s" podCreationTimestamp="2026-02-02 11:15:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:15:56.660460144 +0000 UTC m=+1350.997561172" watchObservedRunningTime="2026-02-02 11:15:56.665901797 +0000 UTC m=+1351.003002845" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.676290 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/nova-cell1-db-create-bx8gk" podStartSLOduration=3.676271551 podStartE2EDuration="3.676271551s" podCreationTimestamp="2026-02-02 11:15:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:15:56.671526296 +0000 UTC m=+1351.008627334" watchObservedRunningTime="2026-02-02 11:15:56.676271551 +0000 UTC m=+1351.013372579" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.699811 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-5f6e-account-create-update-jp75l" podStartSLOduration=2.699784882 podStartE2EDuration="2.699784882s" podCreationTimestamp="2026-02-02 11:15:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:15:56.686684696 +0000 UTC m=+1351.023785724" watchObservedRunningTime="2026-02-02 11:15:56.699784882 +0000 UTC m=+1351.036885920" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.704166 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-mpdzc" podStartSLOduration=3.704155627 podStartE2EDuration="3.704155627s" podCreationTimestamp="2026-02-02 11:15:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:15:56.700354347 +0000 UTC m=+1351.037455375" watchObservedRunningTime="2026-02-02 11:15:56.704155627 +0000 UTC m=+1351.041256655" Feb 02 11:15:56 crc kubenswrapper[4838]: I0202 11:15:56.778236 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:15:56 crc kubenswrapper[4838]: W0202 11:15:56.782057 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0331ec29_4194_41b3_a014_bdf3f869aa17.slice/crio-27687f42c9604160214d7570dd0bafc5be4d54142a9485404fc3e2c17453cae3 WatchSource:0}: Error finding container 27687f42c9604160214d7570dd0bafc5be4d54142a9485404fc3e2c17453cae3: Status 404 returned error can't find the container with id 27687f42c9604160214d7570dd0bafc5be4d54142a9485404fc3e2c17453cae3 Feb 02 11:15:57 crc kubenswrapper[4838]: I0202 11:15:57.640075 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8824c6f8-8e4e-436f-a4c5-755ac38d0979","Type":"ContainerStarted","Data":"9a1333f93691ce7b44740762d94c162f0e029288e5062c5f937a9dc2fb101841"} Feb 02 11:15:57 crc kubenswrapper[4838]: I0202 11:15:57.641789 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0331ec29-4194-41b3-a014-bdf3f869aa17","Type":"ContainerStarted","Data":"27687f42c9604160214d7570dd0bafc5be4d54142a9485404fc3e2c17453cae3"} Feb 02 11:15:57 crc kubenswrapper[4838]: I0202 11:15:57.644206 4838 generic.go:334] "Generic (PLEG): container finished" podID="6c77290d-3574-4eb4-ab26-abf2f666367b" containerID="d8d0f7451dcbb418f26043ca0932b0d2cea018fd39287787d4f3fdad971ceee4" exitCode=0 Feb 02 11:15:57 crc kubenswrapper[4838]: I0202 11:15:57.644262 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-bx8gk" event={"ID":"6c77290d-3574-4eb4-ab26-abf2f666367b","Type":"ContainerDied","Data":"d8d0f7451dcbb418f26043ca0932b0d2cea018fd39287787d4f3fdad971ceee4"} Feb 02 11:15:57 crc kubenswrapper[4838]: I0202 11:15:57.679047 4838 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.679024782 podStartE2EDuration="4.679024782s" podCreationTimestamp="2026-02-02 11:15:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:15:57.670433855 +0000 UTC m=+1352.007534913" watchObservedRunningTime="2026-02-02 11:15:57.679024782 +0000 UTC m=+1352.016125810" Feb 02 11:15:59 crc kubenswrapper[4838]: I0202 11:15:59.104166 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-bx8gk" Feb 02 11:15:59 crc kubenswrapper[4838]: I0202 11:15:59.262666 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6c77290d-3574-4eb4-ab26-abf2f666367b-operator-scripts\") pod \"6c77290d-3574-4eb4-ab26-abf2f666367b\" (UID: \"6c77290d-3574-4eb4-ab26-abf2f666367b\") " Feb 02 11:15:59 crc kubenswrapper[4838]: I0202 11:15:59.262930 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vb7cc\" (UniqueName: \"kubernetes.io/projected/6c77290d-3574-4eb4-ab26-abf2f666367b-kube-api-access-vb7cc\") pod \"6c77290d-3574-4eb4-ab26-abf2f666367b\" (UID: \"6c77290d-3574-4eb4-ab26-abf2f666367b\") " Feb 02 11:15:59 crc kubenswrapper[4838]: I0202 11:15:59.263966 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c77290d-3574-4eb4-ab26-abf2f666367b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6c77290d-3574-4eb4-ab26-abf2f666367b" (UID: "6c77290d-3574-4eb4-ab26-abf2f666367b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:15:59 crc kubenswrapper[4838]: I0202 11:15:59.271912 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c77290d-3574-4eb4-ab26-abf2f666367b-kube-api-access-vb7cc" (OuterVolumeSpecName: "kube-api-access-vb7cc") pod "6c77290d-3574-4eb4-ab26-abf2f666367b" (UID: "6c77290d-3574-4eb4-ab26-abf2f666367b"). InnerVolumeSpecName "kube-api-access-vb7cc". 
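
In every "Observed pod startup duration" record above, podStartSLOduration equals podStartE2EDuration while firstStartedPulling and lastFinishedPulling sit at Go's zero time (0001-01-01 00:00:00), i.e. no image pull was observed for these short-lived jobs. A small consistency check of that reading; the field names come straight from the log, the parsing helper is mine:

    // slocheck.go - illustrative only: when no image pull is recorded
    // (zero-time pull stamps), SLO duration should equal E2E duration.
    package main

    import (
        "fmt"
        "regexp"
        "strconv"
        "strings"
    )

    // One record's fields, copied from the nova-cell1-2b0b entry above.
    var line = `podStartSLOduration=2.65577903 podStartE2EDuration="2.65577903s" ` +
        `firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" ` +
        `lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC"`

    // field extracts a key=value or key="value ..." field from the record.
    func field(s, key string) string {
        m := regexp.MustCompile(key + `="?([^" ]+)`).FindStringSubmatch(s)
        if m == nil {
            return ""
        }
        return m[1]
    }

    func main() {
        slo, _ := strconv.ParseFloat(field(line, "podStartSLOduration"), 64)
        e2e := field(line, "podStartE2EDuration")
        pulled := !strings.HasPrefix(field(line, "firstStartedPulling"), "0001-01-01")
        fmt.Printf("slo=%vs e2e=%v pullObserved=%v\n", slo, e2e, pulled)
        // Prints slo=2.65577903s e2e=2.65577903s pullObserved=false:
        // with no pull window to subtract, SLO == E2E, exactly as logged.
    }
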
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:15:59 crc kubenswrapper[4838]: I0202 11:15:59.364783 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vb7cc\" (UniqueName: \"kubernetes.io/projected/6c77290d-3574-4eb4-ab26-abf2f666367b-kube-api-access-vb7cc\") on node \"crc\" DevicePath \"\"" Feb 02 11:15:59 crc kubenswrapper[4838]: I0202 11:15:59.365063 4838 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6c77290d-3574-4eb4-ab26-abf2f666367b-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:15:59 crc kubenswrapper[4838]: I0202 11:15:59.664950 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0331ec29-4194-41b3-a014-bdf3f869aa17","Type":"ContainerStarted","Data":"8098bc11265af193d59f114420487861af4105435ead9ff5ace406b4a1a0c04d"} Feb 02 11:15:59 crc kubenswrapper[4838]: I0202 11:15:59.667046 4838 generic.go:334] "Generic (PLEG): container finished" podID="8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3" containerID="30ba2e023fa61f40d57ac45157d0816e07d0ffa2117bfb9862c75fcc972e1bcb" exitCode=0 Feb 02 11:15:59 crc kubenswrapper[4838]: I0202 11:15:59.667113 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-2b0b-account-create-update-kk8h4" event={"ID":"8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3","Type":"ContainerDied","Data":"30ba2e023fa61f40d57ac45157d0816e07d0ffa2117bfb9862c75fcc972e1bcb"} Feb 02 11:15:59 crc kubenswrapper[4838]: I0202 11:15:59.669025 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-bx8gk" event={"ID":"6c77290d-3574-4eb4-ab26-abf2f666367b","Type":"ContainerDied","Data":"4d28615ece9b3ff7f5dc10cda05fa7841f11621b3dc1bb2c6b7d8dc77ba77af8"} Feb 02 11:15:59 crc kubenswrapper[4838]: I0202 11:15:59.669063 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4d28615ece9b3ff7f5dc10cda05fa7841f11621b3dc1bb2c6b7d8dc77ba77af8" Feb 02 11:15:59 crc kubenswrapper[4838]: I0202 11:15:59.669087 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-bx8gk" Feb 02 11:16:00 crc kubenswrapper[4838]: I0202 11:16:00.804845 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 02 11:16:00 crc kubenswrapper[4838]: I0202 11:16:00.805129 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 02 11:16:00 crc kubenswrapper[4838]: I0202 11:16:00.878998 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 02 11:16:00 crc kubenswrapper[4838]: I0202 11:16:00.881482 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 02 11:16:01 crc kubenswrapper[4838]: I0202 11:16:01.060160 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-2b0b-account-create-update-kk8h4" Feb 02 11:16:01 crc kubenswrapper[4838]: I0202 11:16:01.213046 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmxzx\" (UniqueName: \"kubernetes.io/projected/8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3-kube-api-access-dmxzx\") pod \"8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3\" (UID: \"8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3\") " Feb 02 11:16:01 crc kubenswrapper[4838]: I0202 11:16:01.213254 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3-operator-scripts\") pod \"8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3\" (UID: \"8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3\") " Feb 02 11:16:01 crc kubenswrapper[4838]: I0202 11:16:01.213960 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3" (UID: "8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:16:01 crc kubenswrapper[4838]: I0202 11:16:01.219067 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3-kube-api-access-dmxzx" (OuterVolumeSpecName: "kube-api-access-dmxzx") pod "8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3" (UID: "8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3"). InnerVolumeSpecName "kube-api-access-dmxzx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:16:01 crc kubenswrapper[4838]: I0202 11:16:01.315310 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmxzx\" (UniqueName: \"kubernetes.io/projected/8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3-kube-api-access-dmxzx\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:01 crc kubenswrapper[4838]: I0202 11:16:01.315345 4838 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:01 crc kubenswrapper[4838]: I0202 11:16:01.689959 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-2b0b-account-create-update-kk8h4" Feb 02 11:16:01 crc kubenswrapper[4838]: I0202 11:16:01.692834 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-2b0b-account-create-update-kk8h4" event={"ID":"8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3","Type":"ContainerDied","Data":"d9ad92c78d8ab6e65a8cce1b16f176d5971b28b0853c551da8af62a0e29e8499"} Feb 02 11:16:01 crc kubenswrapper[4838]: I0202 11:16:01.692936 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d9ad92c78d8ab6e65a8cce1b16f176d5971b28b0853c551da8af62a0e29e8499" Feb 02 11:16:01 crc kubenswrapper[4838]: I0202 11:16:01.692985 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 02 11:16:01 crc kubenswrapper[4838]: I0202 11:16:01.693003 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 02 11:16:03 crc kubenswrapper[4838]: I0202 11:16:03.597342 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 02 11:16:03 crc kubenswrapper[4838]: I0202 11:16:03.705694 4838 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 11:16:03 crc kubenswrapper[4838]: I0202 11:16:03.720342 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 02 11:16:04 crc kubenswrapper[4838]: I0202 11:16:04.192022 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 02 11:16:04 crc kubenswrapper[4838]: I0202 11:16:04.193308 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 02 11:16:04 crc kubenswrapper[4838]: I0202 11:16:04.223631 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 02 11:16:04 crc kubenswrapper[4838]: I0202 11:16:04.264456 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 02 11:16:04 crc kubenswrapper[4838]: I0202 11:16:04.717522 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0331ec29-4194-41b3-a014-bdf3f869aa17","Type":"ContainerStarted","Data":"0fa937d050468e6319818c17cdc68d0d981aa9f217aeee95a83c81ad5177854b"} Feb 02 11:16:04 crc kubenswrapper[4838]: I0202 11:16:04.717903 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 02 11:16:04 crc kubenswrapper[4838]: I0202 11:16:04.717919 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 02 11:16:05 crc kubenswrapper[4838]: I0202 11:16:05.725481 4838 generic.go:334] "Generic (PLEG): container finished" podID="052c9a16-08e7-4892-8bcd-dc3055e14ca8" containerID="a4509ee30dd40d9774a51e328127f4308faaa33f39254883fe22b5aed9fbc5da" exitCode=0 Feb 02 11:16:05 crc kubenswrapper[4838]: I0202 11:16:05.725556 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-bh9vr" event={"ID":"052c9a16-08e7-4892-8bcd-dc3055e14ca8","Type":"ContainerDied","Data":"a4509ee30dd40d9774a51e328127f4308faaa33f39254883fe22b5aed9fbc5da"} Feb 02 11:16:06 crc kubenswrapper[4838]: I0202 11:16:06.621475 4838 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 02 11:16:06 crc kubenswrapper[4838]: I0202 11:16:06.649455 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 02 11:16:06 crc kubenswrapper[4838]: I0202 11:16:06.736718 4838 generic.go:334] "Generic (PLEG): container finished" podID="d369bf9a-8e44-4186-8513-9f73bd321e6e" containerID="858527d4cfb040fb4f32e431c29bb41f26498554fd87d58c448f438b4a7a8c7f" exitCode=0 Feb 02 11:16:06 crc kubenswrapper[4838]: I0202 11:16:06.736795 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-mpdzc" event={"ID":"d369bf9a-8e44-4186-8513-9f73bd321e6e","Type":"ContainerDied","Data":"858527d4cfb040fb4f32e431c29bb41f26498554fd87d58c448f438b4a7a8c7f"} Feb 02 11:16:06 crc kubenswrapper[4838]: I0202 11:16:06.743388 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0331ec29-4194-41b3-a014-bdf3f869aa17","Type":"ContainerStarted","Data":"97fcfb2ca66f65eb91f90a8d8a986c7d26cc28a9624ff181d601bd2fc2014830"} Feb 02 11:16:07 crc kubenswrapper[4838]: I0202 11:16:07.231162 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-bh9vr" Feb 02 11:16:07 crc kubenswrapper[4838]: I0202 11:16:07.337561 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/052c9a16-08e7-4892-8bcd-dc3055e14ca8-operator-scripts\") pod \"052c9a16-08e7-4892-8bcd-dc3055e14ca8\" (UID: \"052c9a16-08e7-4892-8bcd-dc3055e14ca8\") " Feb 02 11:16:07 crc kubenswrapper[4838]: I0202 11:16:07.337769 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbrf4\" (UniqueName: \"kubernetes.io/projected/052c9a16-08e7-4892-8bcd-dc3055e14ca8-kube-api-access-qbrf4\") pod \"052c9a16-08e7-4892-8bcd-dc3055e14ca8\" (UID: \"052c9a16-08e7-4892-8bcd-dc3055e14ca8\") " Feb 02 11:16:07 crc kubenswrapper[4838]: I0202 11:16:07.338776 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/052c9a16-08e7-4892-8bcd-dc3055e14ca8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "052c9a16-08e7-4892-8bcd-dc3055e14ca8" (UID: "052c9a16-08e7-4892-8bcd-dc3055e14ca8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:16:07 crc kubenswrapper[4838]: I0202 11:16:07.343545 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/052c9a16-08e7-4892-8bcd-dc3055e14ca8-kube-api-access-qbrf4" (OuterVolumeSpecName: "kube-api-access-qbrf4") pod "052c9a16-08e7-4892-8bcd-dc3055e14ca8" (UID: "052c9a16-08e7-4892-8bcd-dc3055e14ca8"). InnerVolumeSpecName "kube-api-access-qbrf4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:16:07 crc kubenswrapper[4838]: I0202 11:16:07.439430 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbrf4\" (UniqueName: \"kubernetes.io/projected/052c9a16-08e7-4892-8bcd-dc3055e14ca8-kube-api-access-qbrf4\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:07 crc kubenswrapper[4838]: I0202 11:16:07.439676 4838 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/052c9a16-08e7-4892-8bcd-dc3055e14ca8-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:07 crc kubenswrapper[4838]: I0202 11:16:07.787098 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-bh9vr" Feb 02 11:16:07 crc kubenswrapper[4838]: I0202 11:16:07.788760 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-bh9vr" event={"ID":"052c9a16-08e7-4892-8bcd-dc3055e14ca8","Type":"ContainerDied","Data":"ea72f8d29382767d6ff11b188a458dde5d64f8f84405b1c765d3f5e7085698e5"} Feb 02 11:16:07 crc kubenswrapper[4838]: I0202 11:16:07.788831 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea72f8d29382767d6ff11b188a458dde5d64f8f84405b1c765d3f5e7085698e5" Feb 02 11:16:08 crc kubenswrapper[4838]: I0202 11:16:08.104951 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-mpdzc" Feb 02 11:16:08 crc kubenswrapper[4838]: I0202 11:16:08.158447 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d369bf9a-8e44-4186-8513-9f73bd321e6e-operator-scripts\") pod \"d369bf9a-8e44-4186-8513-9f73bd321e6e\" (UID: \"d369bf9a-8e44-4186-8513-9f73bd321e6e\") " Feb 02 11:16:08 crc kubenswrapper[4838]: I0202 11:16:08.158739 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-blv4n\" (UniqueName: \"kubernetes.io/projected/d369bf9a-8e44-4186-8513-9f73bd321e6e-kube-api-access-blv4n\") pod \"d369bf9a-8e44-4186-8513-9f73bd321e6e\" (UID: \"d369bf9a-8e44-4186-8513-9f73bd321e6e\") " Feb 02 11:16:08 crc kubenswrapper[4838]: I0202 11:16:08.159921 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d369bf9a-8e44-4186-8513-9f73bd321e6e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d369bf9a-8e44-4186-8513-9f73bd321e6e" (UID: "d369bf9a-8e44-4186-8513-9f73bd321e6e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:16:08 crc kubenswrapper[4838]: I0202 11:16:08.183752 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d369bf9a-8e44-4186-8513-9f73bd321e6e-kube-api-access-blv4n" (OuterVolumeSpecName: "kube-api-access-blv4n") pod "d369bf9a-8e44-4186-8513-9f73bd321e6e" (UID: "d369bf9a-8e44-4186-8513-9f73bd321e6e"). InnerVolumeSpecName "kube-api-access-blv4n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:16:08 crc kubenswrapper[4838]: I0202 11:16:08.262174 4838 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d369bf9a-8e44-4186-8513-9f73bd321e6e-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:08 crc kubenswrapper[4838]: I0202 11:16:08.262215 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-blv4n\" (UniqueName: \"kubernetes.io/projected/d369bf9a-8e44-4186-8513-9f73bd321e6e-kube-api-access-blv4n\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:08 crc kubenswrapper[4838]: I0202 11:16:08.799236 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-mpdzc" event={"ID":"d369bf9a-8e44-4186-8513-9f73bd321e6e","Type":"ContainerDied","Data":"0523e6f9371b83d8fa564b2fcaf1c2e3875f816ee2dcae8ee62a23c3d4353542"} Feb 02 11:16:08 crc kubenswrapper[4838]: I0202 11:16:08.799284 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0523e6f9371b83d8fa564b2fcaf1c2e3875f816ee2dcae8ee62a23c3d4353542" Feb 02 11:16:08 crc kubenswrapper[4838]: I0202 11:16:08.800548 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-mpdzc" Feb 02 11:16:09 crc kubenswrapper[4838]: I0202 11:16:09.812811 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0331ec29-4194-41b3-a014-bdf3f869aa17","Type":"ContainerStarted","Data":"66a4723a5aced9c5cf310c77b8cebb6e8be7ea1ae636ff0df70156943ad70d07"} Feb 02 11:16:09 crc kubenswrapper[4838]: I0202 11:16:09.813262 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 11:16:09 crc kubenswrapper[4838]: I0202 11:16:09.846537 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.884118825 podStartE2EDuration="14.846513724s" podCreationTimestamp="2026-02-02 11:15:55 +0000 UTC" firstStartedPulling="2026-02-02 11:15:56.784959029 +0000 UTC m=+1351.122060057" lastFinishedPulling="2026-02-02 11:16:08.747353938 +0000 UTC m=+1363.084454956" observedRunningTime="2026-02-02 11:16:09.841087921 +0000 UTC m=+1364.178188959" watchObservedRunningTime="2026-02-02 11:16:09.846513724 +0000 UTC m=+1364.183614762" Feb 02 11:16:11 crc kubenswrapper[4838]: I0202 11:16:11.830009 4838 generic.go:334] "Generic (PLEG): container finished" podID="04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55" containerID="0b5d55f1358badab9b4cea5a00f3aaa32f6b79b2e6923f7aa38805adfd89f216" exitCode=0 Feb 02 11:16:11 crc kubenswrapper[4838]: I0202 11:16:11.830090 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-e31d-account-create-update-xckgb" event={"ID":"04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55","Type":"ContainerDied","Data":"0b5d55f1358badab9b4cea5a00f3aaa32f6b79b2e6923f7aa38805adfd89f216"} Feb 02 11:16:11 crc kubenswrapper[4838]: I0202 11:16:11.833432 4838 generic.go:334] "Generic (PLEG): container finished" podID="c77c3373-fdbf-4a38-8ab0-ce701577f29f" containerID="093cc634de90b5c2024fdd65ae71c20107d325c1d7fb2acfa1d56c855a7890cb" exitCode=0 Feb 02 11:16:11 crc kubenswrapper[4838]: I0202 11:16:11.833471 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-5f6e-account-create-update-jp75l" 
event={"ID":"c77c3373-fdbf-4a38-8ab0-ce701577f29f","Type":"ContainerDied","Data":"093cc634de90b5c2024fdd65ae71c20107d325c1d7fb2acfa1d56c855a7890cb"} Feb 02 11:16:13 crc kubenswrapper[4838]: I0202 11:16:13.275869 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-e31d-account-create-update-xckgb" Feb 02 11:16:13 crc kubenswrapper[4838]: I0202 11:16:13.285558 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-5f6e-account-create-update-jp75l" Feb 02 11:16:13 crc kubenswrapper[4838]: I0202 11:16:13.461262 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c77c3373-fdbf-4a38-8ab0-ce701577f29f-operator-scripts\") pod \"c77c3373-fdbf-4a38-8ab0-ce701577f29f\" (UID: \"c77c3373-fdbf-4a38-8ab0-ce701577f29f\") " Feb 02 11:16:13 crc kubenswrapper[4838]: I0202 11:16:13.461324 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f2k8x\" (UniqueName: \"kubernetes.io/projected/c77c3373-fdbf-4a38-8ab0-ce701577f29f-kube-api-access-f2k8x\") pod \"c77c3373-fdbf-4a38-8ab0-ce701577f29f\" (UID: \"c77c3373-fdbf-4a38-8ab0-ce701577f29f\") " Feb 02 11:16:13 crc kubenswrapper[4838]: I0202 11:16:13.461369 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-drbhc\" (UniqueName: \"kubernetes.io/projected/04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55-kube-api-access-drbhc\") pod \"04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55\" (UID: \"04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55\") " Feb 02 11:16:13 crc kubenswrapper[4838]: I0202 11:16:13.461415 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55-operator-scripts\") pod \"04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55\" (UID: \"04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55\") " Feb 02 11:16:13 crc kubenswrapper[4838]: I0202 11:16:13.462148 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c77c3373-fdbf-4a38-8ab0-ce701577f29f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c77c3373-fdbf-4a38-8ab0-ce701577f29f" (UID: "c77c3373-fdbf-4a38-8ab0-ce701577f29f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:16:13 crc kubenswrapper[4838]: I0202 11:16:13.462389 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55" (UID: "04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:16:13 crc kubenswrapper[4838]: I0202 11:16:13.467353 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55-kube-api-access-drbhc" (OuterVolumeSpecName: "kube-api-access-drbhc") pod "04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55" (UID: "04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55"). InnerVolumeSpecName "kube-api-access-drbhc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:16:13 crc kubenswrapper[4838]: I0202 11:16:13.467778 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c77c3373-fdbf-4a38-8ab0-ce701577f29f-kube-api-access-f2k8x" (OuterVolumeSpecName: "kube-api-access-f2k8x") pod "c77c3373-fdbf-4a38-8ab0-ce701577f29f" (UID: "c77c3373-fdbf-4a38-8ab0-ce701577f29f"). InnerVolumeSpecName "kube-api-access-f2k8x". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:16:13 crc kubenswrapper[4838]: I0202 11:16:13.564190 4838 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c77c3373-fdbf-4a38-8ab0-ce701577f29f-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:13 crc kubenswrapper[4838]: I0202 11:16:13.564223 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f2k8x\" (UniqueName: \"kubernetes.io/projected/c77c3373-fdbf-4a38-8ab0-ce701577f29f-kube-api-access-f2k8x\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:13 crc kubenswrapper[4838]: I0202 11:16:13.564235 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-drbhc\" (UniqueName: \"kubernetes.io/projected/04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55-kube-api-access-drbhc\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:13 crc kubenswrapper[4838]: I0202 11:16:13.564243 4838 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:13 crc kubenswrapper[4838]: I0202 11:16:13.850325 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-e31d-account-create-update-xckgb" Feb 02 11:16:13 crc kubenswrapper[4838]: I0202 11:16:13.850316 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-e31d-account-create-update-xckgb" event={"ID":"04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55","Type":"ContainerDied","Data":"88d102ddb480b4b9cb317a1b716def63232f747be3e6e71fa74408d42a02f813"} Feb 02 11:16:13 crc kubenswrapper[4838]: I0202 11:16:13.850529 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88d102ddb480b4b9cb317a1b716def63232f747be3e6e71fa74408d42a02f813" Feb 02 11:16:13 crc kubenswrapper[4838]: I0202 11:16:13.852085 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-5f6e-account-create-update-jp75l" event={"ID":"c77c3373-fdbf-4a38-8ab0-ce701577f29f","Type":"ContainerDied","Data":"6df96c07fadf519e4d11b6b4682d888cf0dfcc578d458c63cce5eb1fc22ba175"} Feb 02 11:16:13 crc kubenswrapper[4838]: I0202 11:16:13.852124 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6df96c07fadf519e4d11b6b4682d888cf0dfcc578d458c63cce5eb1fc22ba175" Feb 02 11:16:13 crc kubenswrapper[4838]: I0202 11:16:13.852153 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-5f6e-account-create-update-jp75l" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.353819 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-4r9v4"] Feb 02 11:16:14 crc kubenswrapper[4838]: E0202 11:16:14.355262 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55" containerName="mariadb-account-create-update" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.355349 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55" containerName="mariadb-account-create-update" Feb 02 11:16:14 crc kubenswrapper[4838]: E0202 11:16:14.355435 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3" containerName="mariadb-account-create-update" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.355498 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3" containerName="mariadb-account-create-update" Feb 02 11:16:14 crc kubenswrapper[4838]: E0202 11:16:14.355567 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="052c9a16-08e7-4892-8bcd-dc3055e14ca8" containerName="mariadb-database-create" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.355644 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="052c9a16-08e7-4892-8bcd-dc3055e14ca8" containerName="mariadb-database-create" Feb 02 11:16:14 crc kubenswrapper[4838]: E0202 11:16:14.355711 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c77290d-3574-4eb4-ab26-abf2f666367b" containerName="mariadb-database-create" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.355774 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c77290d-3574-4eb4-ab26-abf2f666367b" containerName="mariadb-database-create" Feb 02 11:16:14 crc kubenswrapper[4838]: E0202 11:16:14.355838 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d369bf9a-8e44-4186-8513-9f73bd321e6e" containerName="mariadb-database-create" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.355900 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="d369bf9a-8e44-4186-8513-9f73bd321e6e" containerName="mariadb-database-create" Feb 02 11:16:14 crc kubenswrapper[4838]: E0202 11:16:14.355971 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c77c3373-fdbf-4a38-8ab0-ce701577f29f" containerName="mariadb-account-create-update" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.356024 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="c77c3373-fdbf-4a38-8ab0-ce701577f29f" containerName="mariadb-account-create-update" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.356274 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="052c9a16-08e7-4892-8bcd-dc3055e14ca8" containerName="mariadb-database-create" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.356366 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="c77c3373-fdbf-4a38-8ab0-ce701577f29f" containerName="mariadb-account-create-update" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.356439 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55" containerName="mariadb-account-create-update" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.356499 4838 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="d369bf9a-8e44-4186-8513-9f73bd321e6e" containerName="mariadb-database-create" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.356556 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3" containerName="mariadb-account-create-update" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.356652 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c77290d-3574-4eb4-ab26-abf2f666367b" containerName="mariadb-database-create" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.357427 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-4r9v4" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.359697 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.359949 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-5x2vp" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.360494 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.362400 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-4r9v4"] Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.379215 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/948b1ebc-d2a2-4e7b-aa2e-d215bbae015d-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-4r9v4\" (UID: \"948b1ebc-d2a2-4e7b-aa2e-d215bbae015d\") " pod="openstack/nova-cell0-conductor-db-sync-4r9v4" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.379327 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4s2d\" (UniqueName: \"kubernetes.io/projected/948b1ebc-d2a2-4e7b-aa2e-d215bbae015d-kube-api-access-f4s2d\") pod \"nova-cell0-conductor-db-sync-4r9v4\" (UID: \"948b1ebc-d2a2-4e7b-aa2e-d215bbae015d\") " pod="openstack/nova-cell0-conductor-db-sync-4r9v4" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.379355 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/948b1ebc-d2a2-4e7b-aa2e-d215bbae015d-scripts\") pod \"nova-cell0-conductor-db-sync-4r9v4\" (UID: \"948b1ebc-d2a2-4e7b-aa2e-d215bbae015d\") " pod="openstack/nova-cell0-conductor-db-sync-4r9v4" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.379434 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/948b1ebc-d2a2-4e7b-aa2e-d215bbae015d-config-data\") pod \"nova-cell0-conductor-db-sync-4r9v4\" (UID: \"948b1ebc-d2a2-4e7b-aa2e-d215bbae015d\") " pod="openstack/nova-cell0-conductor-db-sync-4r9v4" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.481129 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/948b1ebc-d2a2-4e7b-aa2e-d215bbae015d-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-4r9v4\" (UID: \"948b1ebc-d2a2-4e7b-aa2e-d215bbae015d\") " pod="openstack/nova-cell0-conductor-db-sync-4r9v4" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.481251 4838 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4s2d\" (UniqueName: \"kubernetes.io/projected/948b1ebc-d2a2-4e7b-aa2e-d215bbae015d-kube-api-access-f4s2d\") pod \"nova-cell0-conductor-db-sync-4r9v4\" (UID: \"948b1ebc-d2a2-4e7b-aa2e-d215bbae015d\") " pod="openstack/nova-cell0-conductor-db-sync-4r9v4" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.481281 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/948b1ebc-d2a2-4e7b-aa2e-d215bbae015d-scripts\") pod \"nova-cell0-conductor-db-sync-4r9v4\" (UID: \"948b1ebc-d2a2-4e7b-aa2e-d215bbae015d\") " pod="openstack/nova-cell0-conductor-db-sync-4r9v4" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.481327 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/948b1ebc-d2a2-4e7b-aa2e-d215bbae015d-config-data\") pod \"nova-cell0-conductor-db-sync-4r9v4\" (UID: \"948b1ebc-d2a2-4e7b-aa2e-d215bbae015d\") " pod="openstack/nova-cell0-conductor-db-sync-4r9v4" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.486267 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/948b1ebc-d2a2-4e7b-aa2e-d215bbae015d-scripts\") pod \"nova-cell0-conductor-db-sync-4r9v4\" (UID: \"948b1ebc-d2a2-4e7b-aa2e-d215bbae015d\") " pod="openstack/nova-cell0-conductor-db-sync-4r9v4" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.487942 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/948b1ebc-d2a2-4e7b-aa2e-d215bbae015d-config-data\") pod \"nova-cell0-conductor-db-sync-4r9v4\" (UID: \"948b1ebc-d2a2-4e7b-aa2e-d215bbae015d\") " pod="openstack/nova-cell0-conductor-db-sync-4r9v4" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.490501 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/948b1ebc-d2a2-4e7b-aa2e-d215bbae015d-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-4r9v4\" (UID: \"948b1ebc-d2a2-4e7b-aa2e-d215bbae015d\") " pod="openstack/nova-cell0-conductor-db-sync-4r9v4" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.497697 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4s2d\" (UniqueName: \"kubernetes.io/projected/948b1ebc-d2a2-4e7b-aa2e-d215bbae015d-kube-api-access-f4s2d\") pod \"nova-cell0-conductor-db-sync-4r9v4\" (UID: \"948b1ebc-d2a2-4e7b-aa2e-d215bbae015d\") " pod="openstack/nova-cell0-conductor-db-sync-4r9v4" Feb 02 11:16:14 crc kubenswrapper[4838]: I0202 11:16:14.674555 4838 util.go:30] "No sandbox for pod can be found. 
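
At 11:16:14 above, the cpu and memory managers drop per-container state for the six finished database and account jobs before admitting nova-cell0-conductor-db-sync-4r9v4. Functionally that is a map keyed by (podUID, containerName) being pruned of entries whose pods are gone; a stripped-down sketch of the idea, where the types and helper are mine, the UIDs are taken from the entries above, and the container name for the new pod is invented:

    // stalestate.go - map pruning in the spirit of the "RemoveStaleState
    // removing state" entries above; not kubelet's actual implementation.
    package main

    import "fmt"

    type key struct{ podUID, container string }

    // removeStaleState deletes assignments whose pod is no longer active.
    func removeStaleState(assignments map[key]string, active map[string]bool) {
        for k := range assignments { // deleting during range is safe in Go
            if !active[k.podUID] {
                fmt.Printf("removing state podUID=%q containerName=%q\n", k.podUID, k.container)
                delete(assignments, k)
            }
        }
    }

    func main() {
        state := map[key]string{
            {"6c77290d-3574-4eb4-ab26-abf2f666367b", "mariadb-database-create"}:     "cpuset-a",
            {"948b1ebc-d2a2-4e7b-aa2e-d215bbae015d", "nova-cell0-conductor-db-sync"}: "cpuset-b",
        }
        active := map[string]bool{"948b1ebc-d2a2-4e7b-aa2e-d215bbae015d": true}
        removeStaleState(state, active)
        fmt.Println("entries left:", len(state)) // 1: only the live pod's state stays
    }
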
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-4r9v4" Feb 02 11:16:15 crc kubenswrapper[4838]: I0202 11:16:15.430744 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 11:16:15 crc kubenswrapper[4838]: I0202 11:16:15.430848 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 11:16:15 crc kubenswrapper[4838]: I0202 11:16:15.483257 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-4r9v4"] Feb 02 11:16:15 crc kubenswrapper[4838]: I0202 11:16:15.871717 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-4r9v4" event={"ID":"948b1ebc-d2a2-4e7b-aa2e-d215bbae015d","Type":"ContainerStarted","Data":"9c13c41d0101d379bcf38fcbee927c7361193071fe464a0f57193d12f6a2d1de"} Feb 02 11:16:20 crc kubenswrapper[4838]: I0202 11:16:20.891653 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9ztsj"] Feb 02 11:16:20 crc kubenswrapper[4838]: I0202 11:16:20.895492 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9ztsj" Feb 02 11:16:20 crc kubenswrapper[4838]: I0202 11:16:20.900988 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9ztsj"] Feb 02 11:16:21 crc kubenswrapper[4838]: I0202 11:16:21.039316 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/370d896a-37f9-4510-8800-4002a77c104d-catalog-content\") pod \"redhat-operators-9ztsj\" (UID: \"370d896a-37f9-4510-8800-4002a77c104d\") " pod="openshift-marketplace/redhat-operators-9ztsj" Feb 02 11:16:21 crc kubenswrapper[4838]: I0202 11:16:21.039394 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qw57c\" (UniqueName: \"kubernetes.io/projected/370d896a-37f9-4510-8800-4002a77c104d-kube-api-access-qw57c\") pod \"redhat-operators-9ztsj\" (UID: \"370d896a-37f9-4510-8800-4002a77c104d\") " pod="openshift-marketplace/redhat-operators-9ztsj" Feb 02 11:16:21 crc kubenswrapper[4838]: I0202 11:16:21.039429 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/370d896a-37f9-4510-8800-4002a77c104d-utilities\") pod \"redhat-operators-9ztsj\" (UID: \"370d896a-37f9-4510-8800-4002a77c104d\") " pod="openshift-marketplace/redhat-operators-9ztsj" Feb 02 11:16:21 crc kubenswrapper[4838]: I0202 11:16:21.142095 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/370d896a-37f9-4510-8800-4002a77c104d-catalog-content\") pod \"redhat-operators-9ztsj\" (UID: \"370d896a-37f9-4510-8800-4002a77c104d\") " pod="openshift-marketplace/redhat-operators-9ztsj" Feb 02 11:16:21 crc kubenswrapper[4838]: I0202 11:16:21.142174 4838 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qw57c\" (UniqueName: \"kubernetes.io/projected/370d896a-37f9-4510-8800-4002a77c104d-kube-api-access-qw57c\") pod \"redhat-operators-9ztsj\" (UID: \"370d896a-37f9-4510-8800-4002a77c104d\") " pod="openshift-marketplace/redhat-operators-9ztsj" Feb 02 11:16:21 crc kubenswrapper[4838]: I0202 11:16:21.142205 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/370d896a-37f9-4510-8800-4002a77c104d-utilities\") pod \"redhat-operators-9ztsj\" (UID: \"370d896a-37f9-4510-8800-4002a77c104d\") " pod="openshift-marketplace/redhat-operators-9ztsj" Feb 02 11:16:21 crc kubenswrapper[4838]: I0202 11:16:21.142841 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/370d896a-37f9-4510-8800-4002a77c104d-utilities\") pod \"redhat-operators-9ztsj\" (UID: \"370d896a-37f9-4510-8800-4002a77c104d\") " pod="openshift-marketplace/redhat-operators-9ztsj" Feb 02 11:16:21 crc kubenswrapper[4838]: I0202 11:16:21.142885 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/370d896a-37f9-4510-8800-4002a77c104d-catalog-content\") pod \"redhat-operators-9ztsj\" (UID: \"370d896a-37f9-4510-8800-4002a77c104d\") " pod="openshift-marketplace/redhat-operators-9ztsj" Feb 02 11:16:21 crc kubenswrapper[4838]: I0202 11:16:21.171499 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qw57c\" (UniqueName: \"kubernetes.io/projected/370d896a-37f9-4510-8800-4002a77c104d-kube-api-access-qw57c\") pod \"redhat-operators-9ztsj\" (UID: \"370d896a-37f9-4510-8800-4002a77c104d\") " pod="openshift-marketplace/redhat-operators-9ztsj" Feb 02 11:16:21 crc kubenswrapper[4838]: I0202 11:16:21.222990 4838 util.go:30] "No sandbox for pod can be found. 
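
The 11:16:15 entries above record a liveness probe failing with connection refused: nothing was listening on 127.0.0.1:8798 while machine-config-daemon was down. A probe of this kind is an ordinary HTTP GET with a timeout; a minimal version that produces the same refusal text when no listener is up (the endpoint is copied from the log, the rest is assumed):

    // probe.go - sketch of an HTTP liveness check against the endpoint in
    // the log; with no listener it prints the same "connection refused".
    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    func main() {
        client := &http.Client{Timeout: 2 * time.Second}
        resp, err := client.Get("http://127.0.0.1:8798/health")
        if err != nil {
            // e.g. Get "http://127.0.0.1:8798/health": dial tcp
            // 127.0.0.1:8798: connect: connection refused
            fmt.Println("Probe failed:", err)
            return
        }
        defer resp.Body.Close()
        // kubelet counts any 2xx/3xx status as a passing HTTP probe.
        fmt.Println("Probe status:", resp.StatusCode)
    }
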
Need to start a new one" pod="openshift-marketplace/redhat-operators-9ztsj" Feb 02 11:16:26 crc kubenswrapper[4838]: I0202 11:16:26.251752 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Feb 02 11:16:28 crc kubenswrapper[4838]: I0202 11:16:28.785073 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9ztsj"] Feb 02 11:16:29 crc kubenswrapper[4838]: W0202 11:16:29.041109 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod370d896a_37f9_4510_8800_4002a77c104d.slice/crio-544c32ece08fbf763d311161c39c7eaf3d997303f73339f41334018988e8b5ce WatchSource:0}: Error finding container 544c32ece08fbf763d311161c39c7eaf3d997303f73339f41334018988e8b5ce: Status 404 returned error can't find the container with id 544c32ece08fbf763d311161c39c7eaf3d997303f73339f41334018988e8b5ce Feb 02 11:16:29 crc kubenswrapper[4838]: E0202 11:16:29.488539 4838 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod370d896a_37f9_4510_8800_4002a77c104d.slice/crio-748342545ab7293a783e976545de062708d67a3d1fb017dcc3b8f125a247606e.scope\": RecentStats: unable to find data in memory cache]" Feb 02 11:16:30 crc kubenswrapper[4838]: I0202 11:16:30.002151 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-4r9v4" event={"ID":"948b1ebc-d2a2-4e7b-aa2e-d215bbae015d","Type":"ContainerStarted","Data":"5d5e907869030a55ad318e1d933bff3c790413853b7dba2978c0d1386f6b4b5c"} Feb 02 11:16:30 crc kubenswrapper[4838]: I0202 11:16:30.005097 4838 generic.go:334] "Generic (PLEG): container finished" podID="370d896a-37f9-4510-8800-4002a77c104d" containerID="748342545ab7293a783e976545de062708d67a3d1fb017dcc3b8f125a247606e" exitCode=0 Feb 02 11:16:30 crc kubenswrapper[4838]: I0202 11:16:30.005174 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9ztsj" event={"ID":"370d896a-37f9-4510-8800-4002a77c104d","Type":"ContainerDied","Data":"748342545ab7293a783e976545de062708d67a3d1fb017dcc3b8f125a247606e"} Feb 02 11:16:30 crc kubenswrapper[4838]: I0202 11:16:30.005196 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9ztsj" event={"ID":"370d896a-37f9-4510-8800-4002a77c104d","Type":"ContainerStarted","Data":"544c32ece08fbf763d311161c39c7eaf3d997303f73339f41334018988e8b5ce"} Feb 02 11:16:30 crc kubenswrapper[4838]: I0202 11:16:30.007178 4838 generic.go:334] "Generic (PLEG): container finished" podID="50879d46-58dc-4716-89fd-bc68eea3bd2e" containerID="ac1a6fcf0eca00396a708ee597872b3453fc12310c0a4d0f7e014f7839628f48" exitCode=0 Feb 02 11:16:30 crc kubenswrapper[4838]: I0202 11:16:30.007235 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-4dkm9" event={"ID":"50879d46-58dc-4716-89fd-bc68eea3bd2e","Type":"ContainerDied","Data":"ac1a6fcf0eca00396a708ee597872b3453fc12310c0a4d0f7e014f7839628f48"} Feb 02 11:16:30 crc kubenswrapper[4838]: I0202 11:16:30.021527 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-4r9v4" podStartSLOduration=2.421906633 podStartE2EDuration="16.02150867s" podCreationTimestamp="2026-02-02 11:16:14 +0000 UTC" firstStartedPulling="2026-02-02 11:16:15.486048397 +0000 UTC m=+1369.823149445" 
lastFinishedPulling="2026-02-02 11:16:29.085650454 +0000 UTC m=+1383.422751482" observedRunningTime="2026-02-02 11:16:30.018444369 +0000 UTC m=+1384.355545407" watchObservedRunningTime="2026-02-02 11:16:30.02150867 +0000 UTC m=+1384.358609698" Feb 02 11:16:30 crc kubenswrapper[4838]: I0202 11:16:30.587344 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:16:30 crc kubenswrapper[4838]: I0202 11:16:30.587657 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0331ec29-4194-41b3-a014-bdf3f869aa17" containerName="ceilometer-central-agent" containerID="cri-o://8098bc11265af193d59f114420487861af4105435ead9ff5ace406b4a1a0c04d" gracePeriod=30 Feb 02 11:16:30 crc kubenswrapper[4838]: I0202 11:16:30.587809 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0331ec29-4194-41b3-a014-bdf3f869aa17" containerName="proxy-httpd" containerID="cri-o://66a4723a5aced9c5cf310c77b8cebb6e8be7ea1ae636ff0df70156943ad70d07" gracePeriod=30 Feb 02 11:16:30 crc kubenswrapper[4838]: I0202 11:16:30.587871 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0331ec29-4194-41b3-a014-bdf3f869aa17" containerName="sg-core" containerID="cri-o://97fcfb2ca66f65eb91f90a8d8a986c7d26cc28a9624ff181d601bd2fc2014830" gracePeriod=30 Feb 02 11:16:30 crc kubenswrapper[4838]: I0202 11:16:30.587913 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0331ec29-4194-41b3-a014-bdf3f869aa17" containerName="ceilometer-notification-agent" containerID="cri-o://0fa937d050468e6319818c17cdc68d0d981aa9f217aeee95a83c81ad5177854b" gracePeriod=30 Feb 02 11:16:31 crc kubenswrapper[4838]: I0202 11:16:31.023793 4838 generic.go:334] "Generic (PLEG): container finished" podID="0331ec29-4194-41b3-a014-bdf3f869aa17" containerID="66a4723a5aced9c5cf310c77b8cebb6e8be7ea1ae636ff0df70156943ad70d07" exitCode=0 Feb 02 11:16:31 crc kubenswrapper[4838]: I0202 11:16:31.023853 4838 generic.go:334] "Generic (PLEG): container finished" podID="0331ec29-4194-41b3-a014-bdf3f869aa17" containerID="97fcfb2ca66f65eb91f90a8d8a986c7d26cc28a9624ff181d601bd2fc2014830" exitCode=2 Feb 02 11:16:31 crc kubenswrapper[4838]: I0202 11:16:31.023866 4838 generic.go:334] "Generic (PLEG): container finished" podID="0331ec29-4194-41b3-a014-bdf3f869aa17" containerID="8098bc11265af193d59f114420487861af4105435ead9ff5ace406b4a1a0c04d" exitCode=0 Feb 02 11:16:31 crc kubenswrapper[4838]: I0202 11:16:31.023892 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0331ec29-4194-41b3-a014-bdf3f869aa17","Type":"ContainerDied","Data":"66a4723a5aced9c5cf310c77b8cebb6e8be7ea1ae636ff0df70156943ad70d07"} Feb 02 11:16:31 crc kubenswrapper[4838]: I0202 11:16:31.023953 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0331ec29-4194-41b3-a014-bdf3f869aa17","Type":"ContainerDied","Data":"97fcfb2ca66f65eb91f90a8d8a986c7d26cc28a9624ff181d601bd2fc2014830"} Feb 02 11:16:31 crc kubenswrapper[4838]: I0202 11:16:31.023966 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0331ec29-4194-41b3-a014-bdf3f869aa17","Type":"ContainerDied","Data":"8098bc11265af193d59f114420487861af4105435ead9ff5ace406b4a1a0c04d"} Feb 02 11:16:31 crc kubenswrapper[4838]: I0202 11:16:31.338281 4838 util.go:48] "No ready sandbox for pod 
Feb 02 11:16:31 crc kubenswrapper[4838]: I0202 11:16:31.338281 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-4dkm9"
Feb 02 11:16:31 crc kubenswrapper[4838]: I0202 11:16:31.469139 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50879d46-58dc-4716-89fd-bc68eea3bd2e-combined-ca-bundle\") pod \"50879d46-58dc-4716-89fd-bc68eea3bd2e\" (UID: \"50879d46-58dc-4716-89fd-bc68eea3bd2e\") "
Feb 02 11:16:31 crc kubenswrapper[4838]: I0202 11:16:31.469278 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/50879d46-58dc-4716-89fd-bc68eea3bd2e-db-sync-config-data\") pod \"50879d46-58dc-4716-89fd-bc68eea3bd2e\" (UID: \"50879d46-58dc-4716-89fd-bc68eea3bd2e\") "
Feb 02 11:16:31 crc kubenswrapper[4838]: I0202 11:16:31.469346 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cwvnq\" (UniqueName: \"kubernetes.io/projected/50879d46-58dc-4716-89fd-bc68eea3bd2e-kube-api-access-cwvnq\") pod \"50879d46-58dc-4716-89fd-bc68eea3bd2e\" (UID: \"50879d46-58dc-4716-89fd-bc68eea3bd2e\") "
Feb 02 11:16:31 crc kubenswrapper[4838]: I0202 11:16:31.476414 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50879d46-58dc-4716-89fd-bc68eea3bd2e-kube-api-access-cwvnq" (OuterVolumeSpecName: "kube-api-access-cwvnq") pod "50879d46-58dc-4716-89fd-bc68eea3bd2e" (UID: "50879d46-58dc-4716-89fd-bc68eea3bd2e"). InnerVolumeSpecName "kube-api-access-cwvnq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:16:31 crc kubenswrapper[4838]: I0202 11:16:31.518068 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50879d46-58dc-4716-89fd-bc68eea3bd2e-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "50879d46-58dc-4716-89fd-bc68eea3bd2e" (UID: "50879d46-58dc-4716-89fd-bc68eea3bd2e"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:16:31 crc kubenswrapper[4838]: I0202 11:16:31.572354 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cwvnq\" (UniqueName: \"kubernetes.io/projected/50879d46-58dc-4716-89fd-bc68eea3bd2e-kube-api-access-cwvnq\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:31 crc kubenswrapper[4838]: I0202 11:16:31.572715 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50879d46-58dc-4716-89fd-bc68eea3bd2e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:31 crc kubenswrapper[4838]: I0202 11:16:31.572800 4838 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/50879d46-58dc-4716-89fd-bc68eea3bd2e-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.035159 4838 generic.go:334] "Generic (PLEG): container finished" podID="370d896a-37f9-4510-8800-4002a77c104d" containerID="31f19b37edef2c2273ffe39f037f3687877b04c191b724412ab05a977a8a1412" exitCode=0 Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.035267 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9ztsj" event={"ID":"370d896a-37f9-4510-8800-4002a77c104d","Type":"ContainerDied","Data":"31f19b37edef2c2273ffe39f037f3687877b04c191b724412ab05a977a8a1412"} Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.038170 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-4dkm9" event={"ID":"50879d46-58dc-4716-89fd-bc68eea3bd2e","Type":"ContainerDied","Data":"1527b659f79909f196019e151677f9611a1d0cce700721fe1c3d76bdec3141f2"} Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.038207 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1527b659f79909f196019e151677f9611a1d0cce700721fe1c3d76bdec3141f2" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.038278 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-4dkm9" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.362671 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-5545c69c4f-jpg4z"] Feb 02 11:16:32 crc kubenswrapper[4838]: E0202 11:16:32.363472 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50879d46-58dc-4716-89fd-bc68eea3bd2e" containerName="barbican-db-sync" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.363499 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="50879d46-58dc-4716-89fd-bc68eea3bd2e" containerName="barbican-db-sync" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.363760 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="50879d46-58dc-4716-89fd-bc68eea3bd2e" containerName="barbican-db-sync" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.365006 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-5545c69c4f-jpg4z" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.367438 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.368238 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.368789 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-crmvz" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.381553 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-6c7fd57b7d-nvvkt"] Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.383379 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6c7fd57b7d-nvvkt" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.390466 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5545c69c4f-jpg4z"] Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.396913 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.412063 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6c7fd57b7d-nvvkt"] Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.498612 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba0aafb6-d810-4b44-8e8a-eebc89abad94-config-data\") pod \"barbican-worker-5545c69c4f-jpg4z\" (UID: \"ba0aafb6-d810-4b44-8e8a-eebc89abad94\") " pod="openstack/barbican-worker-5545c69c4f-jpg4z" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.498768 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4001ac84-f8ab-436b-b526-23940d7f0463-config-data-custom\") pod \"barbican-keystone-listener-6c7fd57b7d-nvvkt\" (UID: \"4001ac84-f8ab-436b-b526-23940d7f0463\") " pod="openstack/barbican-keystone-listener-6c7fd57b7d-nvvkt" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.498839 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba0aafb6-d810-4b44-8e8a-eebc89abad94-combined-ca-bundle\") pod \"barbican-worker-5545c69c4f-jpg4z\" (UID: \"ba0aafb6-d810-4b44-8e8a-eebc89abad94\") " pod="openstack/barbican-worker-5545c69c4f-jpg4z" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.498878 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v59nv\" (UniqueName: \"kubernetes.io/projected/ba0aafb6-d810-4b44-8e8a-eebc89abad94-kube-api-access-v59nv\") pod \"barbican-worker-5545c69c4f-jpg4z\" (UID: \"ba0aafb6-d810-4b44-8e8a-eebc89abad94\") " pod="openstack/barbican-worker-5545c69c4f-jpg4z" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.498943 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjmlr\" (UniqueName: \"kubernetes.io/projected/4001ac84-f8ab-436b-b526-23940d7f0463-kube-api-access-vjmlr\") pod \"barbican-keystone-listener-6c7fd57b7d-nvvkt\" (UID: 
\"4001ac84-f8ab-436b-b526-23940d7f0463\") " pod="openstack/barbican-keystone-listener-6c7fd57b7d-nvvkt" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.498965 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4001ac84-f8ab-436b-b526-23940d7f0463-logs\") pod \"barbican-keystone-listener-6c7fd57b7d-nvvkt\" (UID: \"4001ac84-f8ab-436b-b526-23940d7f0463\") " pod="openstack/barbican-keystone-listener-6c7fd57b7d-nvvkt" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.499014 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4001ac84-f8ab-436b-b526-23940d7f0463-config-data\") pod \"barbican-keystone-listener-6c7fd57b7d-nvvkt\" (UID: \"4001ac84-f8ab-436b-b526-23940d7f0463\") " pod="openstack/barbican-keystone-listener-6c7fd57b7d-nvvkt" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.499032 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4001ac84-f8ab-436b-b526-23940d7f0463-combined-ca-bundle\") pod \"barbican-keystone-listener-6c7fd57b7d-nvvkt\" (UID: \"4001ac84-f8ab-436b-b526-23940d7f0463\") " pod="openstack/barbican-keystone-listener-6c7fd57b7d-nvvkt" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.499056 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ba0aafb6-d810-4b44-8e8a-eebc89abad94-config-data-custom\") pod \"barbican-worker-5545c69c4f-jpg4z\" (UID: \"ba0aafb6-d810-4b44-8e8a-eebc89abad94\") " pod="openstack/barbican-worker-5545c69c4f-jpg4z" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.499076 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba0aafb6-d810-4b44-8e8a-eebc89abad94-logs\") pod \"barbican-worker-5545c69c4f-jpg4z\" (UID: \"ba0aafb6-d810-4b44-8e8a-eebc89abad94\") " pod="openstack/barbican-worker-5545c69c4f-jpg4z" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.587908 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-586bdc5f9-z48qj"] Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.593728 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-586bdc5f9-z48qj" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.600518 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v59nv\" (UniqueName: \"kubernetes.io/projected/ba0aafb6-d810-4b44-8e8a-eebc89abad94-kube-api-access-v59nv\") pod \"barbican-worker-5545c69c4f-jpg4z\" (UID: \"ba0aafb6-d810-4b44-8e8a-eebc89abad94\") " pod="openstack/barbican-worker-5545c69c4f-jpg4z" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.600598 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjmlr\" (UniqueName: \"kubernetes.io/projected/4001ac84-f8ab-436b-b526-23940d7f0463-kube-api-access-vjmlr\") pod \"barbican-keystone-listener-6c7fd57b7d-nvvkt\" (UID: \"4001ac84-f8ab-436b-b526-23940d7f0463\") " pod="openstack/barbican-keystone-listener-6c7fd57b7d-nvvkt" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.600642 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4001ac84-f8ab-436b-b526-23940d7f0463-logs\") pod \"barbican-keystone-listener-6c7fd57b7d-nvvkt\" (UID: \"4001ac84-f8ab-436b-b526-23940d7f0463\") " pod="openstack/barbican-keystone-listener-6c7fd57b7d-nvvkt" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.600687 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4001ac84-f8ab-436b-b526-23940d7f0463-config-data\") pod \"barbican-keystone-listener-6c7fd57b7d-nvvkt\" (UID: \"4001ac84-f8ab-436b-b526-23940d7f0463\") " pod="openstack/barbican-keystone-listener-6c7fd57b7d-nvvkt" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.600712 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4001ac84-f8ab-436b-b526-23940d7f0463-combined-ca-bundle\") pod \"barbican-keystone-listener-6c7fd57b7d-nvvkt\" (UID: \"4001ac84-f8ab-436b-b526-23940d7f0463\") " pod="openstack/barbican-keystone-listener-6c7fd57b7d-nvvkt" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.600738 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ba0aafb6-d810-4b44-8e8a-eebc89abad94-config-data-custom\") pod \"barbican-worker-5545c69c4f-jpg4z\" (UID: \"ba0aafb6-d810-4b44-8e8a-eebc89abad94\") " pod="openstack/barbican-worker-5545c69c4f-jpg4z" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.600761 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba0aafb6-d810-4b44-8e8a-eebc89abad94-logs\") pod \"barbican-worker-5545c69c4f-jpg4z\" (UID: \"ba0aafb6-d810-4b44-8e8a-eebc89abad94\") " pod="openstack/barbican-worker-5545c69c4f-jpg4z" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.600900 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba0aafb6-d810-4b44-8e8a-eebc89abad94-config-data\") pod \"barbican-worker-5545c69c4f-jpg4z\" (UID: \"ba0aafb6-d810-4b44-8e8a-eebc89abad94\") " pod="openstack/barbican-worker-5545c69c4f-jpg4z" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.600924 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/4001ac84-f8ab-436b-b526-23940d7f0463-config-data-custom\") pod \"barbican-keystone-listener-6c7fd57b7d-nvvkt\" (UID: \"4001ac84-f8ab-436b-b526-23940d7f0463\") " pod="openstack/barbican-keystone-listener-6c7fd57b7d-nvvkt" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.600966 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba0aafb6-d810-4b44-8e8a-eebc89abad94-combined-ca-bundle\") pod \"barbican-worker-5545c69c4f-jpg4z\" (UID: \"ba0aafb6-d810-4b44-8e8a-eebc89abad94\") " pod="openstack/barbican-worker-5545c69c4f-jpg4z" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.601723 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba0aafb6-d810-4b44-8e8a-eebc89abad94-logs\") pod \"barbican-worker-5545c69c4f-jpg4z\" (UID: \"ba0aafb6-d810-4b44-8e8a-eebc89abad94\") " pod="openstack/barbican-worker-5545c69c4f-jpg4z" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.601765 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4001ac84-f8ab-436b-b526-23940d7f0463-logs\") pod \"barbican-keystone-listener-6c7fd57b7d-nvvkt\" (UID: \"4001ac84-f8ab-436b-b526-23940d7f0463\") " pod="openstack/barbican-keystone-listener-6c7fd57b7d-nvvkt" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.607403 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-586bdc5f9-z48qj"] Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.619428 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ba0aafb6-d810-4b44-8e8a-eebc89abad94-config-data-custom\") pod \"barbican-worker-5545c69c4f-jpg4z\" (UID: \"ba0aafb6-d810-4b44-8e8a-eebc89abad94\") " pod="openstack/barbican-worker-5545c69c4f-jpg4z" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.619568 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba0aafb6-d810-4b44-8e8a-eebc89abad94-config-data\") pod \"barbican-worker-5545c69c4f-jpg4z\" (UID: \"ba0aafb6-d810-4b44-8e8a-eebc89abad94\") " pod="openstack/barbican-worker-5545c69c4f-jpg4z" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.623841 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4001ac84-f8ab-436b-b526-23940d7f0463-combined-ca-bundle\") pod \"barbican-keystone-listener-6c7fd57b7d-nvvkt\" (UID: \"4001ac84-f8ab-436b-b526-23940d7f0463\") " pod="openstack/barbican-keystone-listener-6c7fd57b7d-nvvkt" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.632983 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4001ac84-f8ab-436b-b526-23940d7f0463-config-data\") pod \"barbican-keystone-listener-6c7fd57b7d-nvvkt\" (UID: \"4001ac84-f8ab-436b-b526-23940d7f0463\") " pod="openstack/barbican-keystone-listener-6c7fd57b7d-nvvkt" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.633402 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba0aafb6-d810-4b44-8e8a-eebc89abad94-combined-ca-bundle\") pod \"barbican-worker-5545c69c4f-jpg4z\" (UID: \"ba0aafb6-d810-4b44-8e8a-eebc89abad94\") " pod="openstack/barbican-worker-5545c69c4f-jpg4z" 
Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.639012 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4001ac84-f8ab-436b-b526-23940d7f0463-config-data-custom\") pod \"barbican-keystone-listener-6c7fd57b7d-nvvkt\" (UID: \"4001ac84-f8ab-436b-b526-23940d7f0463\") " pod="openstack/barbican-keystone-listener-6c7fd57b7d-nvvkt"
Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.637655 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjmlr\" (UniqueName: \"kubernetes.io/projected/4001ac84-f8ab-436b-b526-23940d7f0463-kube-api-access-vjmlr\") pod \"barbican-keystone-listener-6c7fd57b7d-nvvkt\" (UID: \"4001ac84-f8ab-436b-b526-23940d7f0463\") " pod="openstack/barbican-keystone-listener-6c7fd57b7d-nvvkt"
Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.651399 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v59nv\" (UniqueName: \"kubernetes.io/projected/ba0aafb6-d810-4b44-8e8a-eebc89abad94-kube-api-access-v59nv\") pod \"barbican-worker-5545c69c4f-jpg4z\" (UID: \"ba0aafb6-d810-4b44-8e8a-eebc89abad94\") " pod="openstack/barbican-worker-5545c69c4f-jpg4z"
Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.682298 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5545c69c4f-jpg4z"
Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.703309 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-7c794d67d-m7bgs"]
Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.704429 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfjk2\" (UniqueName: \"kubernetes.io/projected/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-kube-api-access-xfjk2\") pod \"dnsmasq-dns-586bdc5f9-z48qj\" (UID: \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\") " pod="openstack/dnsmasq-dns-586bdc5f9-z48qj"
Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.704484 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-ovsdbserver-nb\") pod \"dnsmasq-dns-586bdc5f9-z48qj\" (UID: \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\") " pod="openstack/dnsmasq-dns-586bdc5f9-z48qj"
Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.704502 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-ovsdbserver-sb\") pod \"dnsmasq-dns-586bdc5f9-z48qj\" (UID: \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\") " pod="openstack/dnsmasq-dns-586bdc5f9-z48qj"
Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.704570 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-dns-svc\") pod \"dnsmasq-dns-586bdc5f9-z48qj\" (UID: \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\") " pod="openstack/dnsmasq-dns-586bdc5f9-z48qj"
\"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\") " pod="openstack/dnsmasq-dns-586bdc5f9-z48qj" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.704800 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-config\") pod \"dnsmasq-dns-586bdc5f9-z48qj\" (UID: \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\") " pod="openstack/dnsmasq-dns-586bdc5f9-z48qj" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.706165 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7c794d67d-m7bgs" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.708523 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.716163 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6c7fd57b7d-nvvkt" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.739842 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7c794d67d-m7bgs"] Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.806834 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tm4qn\" (UniqueName: \"kubernetes.io/projected/2e02e508-a0b4-415f-a020-98d39c663483-kube-api-access-tm4qn\") pod \"barbican-api-7c794d67d-m7bgs\" (UID: \"2e02e508-a0b4-415f-a020-98d39c663483\") " pod="openstack/barbican-api-7c794d67d-m7bgs" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.807129 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2e02e508-a0b4-415f-a020-98d39c663483-logs\") pod \"barbican-api-7c794d67d-m7bgs\" (UID: \"2e02e508-a0b4-415f-a020-98d39c663483\") " pod="openstack/barbican-api-7c794d67d-m7bgs" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.807153 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-dns-svc\") pod \"dnsmasq-dns-586bdc5f9-z48qj\" (UID: \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\") " pod="openstack/dnsmasq-dns-586bdc5f9-z48qj" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.807201 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2e02e508-a0b4-415f-a020-98d39c663483-config-data-custom\") pod \"barbican-api-7c794d67d-m7bgs\" (UID: \"2e02e508-a0b4-415f-a020-98d39c663483\") " pod="openstack/barbican-api-7c794d67d-m7bgs" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.807234 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-dns-swift-storage-0\") pod \"dnsmasq-dns-586bdc5f9-z48qj\" (UID: \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\") " pod="openstack/dnsmasq-dns-586bdc5f9-z48qj" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.807272 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-config\") pod \"dnsmasq-dns-586bdc5f9-z48qj\" (UID: \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\") " 
pod="openstack/dnsmasq-dns-586bdc5f9-z48qj" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.807296 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e02e508-a0b4-415f-a020-98d39c663483-combined-ca-bundle\") pod \"barbican-api-7c794d67d-m7bgs\" (UID: \"2e02e508-a0b4-415f-a020-98d39c663483\") " pod="openstack/barbican-api-7c794d67d-m7bgs" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.807326 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfjk2\" (UniqueName: \"kubernetes.io/projected/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-kube-api-access-xfjk2\") pod \"dnsmasq-dns-586bdc5f9-z48qj\" (UID: \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\") " pod="openstack/dnsmasq-dns-586bdc5f9-z48qj" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.807348 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e02e508-a0b4-415f-a020-98d39c663483-config-data\") pod \"barbican-api-7c794d67d-m7bgs\" (UID: \"2e02e508-a0b4-415f-a020-98d39c663483\") " pod="openstack/barbican-api-7c794d67d-m7bgs" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.807374 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-ovsdbserver-nb\") pod \"dnsmasq-dns-586bdc5f9-z48qj\" (UID: \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\") " pod="openstack/dnsmasq-dns-586bdc5f9-z48qj" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.807390 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-ovsdbserver-sb\") pod \"dnsmasq-dns-586bdc5f9-z48qj\" (UID: \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\") " pod="openstack/dnsmasq-dns-586bdc5f9-z48qj" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.808636 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-ovsdbserver-sb\") pod \"dnsmasq-dns-586bdc5f9-z48qj\" (UID: \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\") " pod="openstack/dnsmasq-dns-586bdc5f9-z48qj" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.808864 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-config\") pod \"dnsmasq-dns-586bdc5f9-z48qj\" (UID: \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\") " pod="openstack/dnsmasq-dns-586bdc5f9-z48qj" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.809560 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-dns-svc\") pod \"dnsmasq-dns-586bdc5f9-z48qj\" (UID: \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\") " pod="openstack/dnsmasq-dns-586bdc5f9-z48qj" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.810045 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-ovsdbserver-nb\") pod \"dnsmasq-dns-586bdc5f9-z48qj\" (UID: \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\") " pod="openstack/dnsmasq-dns-586bdc5f9-z48qj" Feb 02 11:16:32 crc 
Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.810053 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-dns-swift-storage-0\") pod \"dnsmasq-dns-586bdc5f9-z48qj\" (UID: \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\") " pod="openstack/dnsmasq-dns-586bdc5f9-z48qj"
Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.830386 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfjk2\" (UniqueName: \"kubernetes.io/projected/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-kube-api-access-xfjk2\") pod \"dnsmasq-dns-586bdc5f9-z48qj\" (UID: \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\") " pod="openstack/dnsmasq-dns-586bdc5f9-z48qj"
Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.890492 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-586bdc5f9-z48qj"
Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.911588 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2e02e508-a0b4-415f-a020-98d39c663483-config-data-custom\") pod \"barbican-api-7c794d67d-m7bgs\" (UID: \"2e02e508-a0b4-415f-a020-98d39c663483\") " pod="openstack/barbican-api-7c794d67d-m7bgs"
Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.911739 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e02e508-a0b4-415f-a020-98d39c663483-combined-ca-bundle\") pod \"barbican-api-7c794d67d-m7bgs\" (UID: \"2e02e508-a0b4-415f-a020-98d39c663483\") " pod="openstack/barbican-api-7c794d67d-m7bgs"
Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.911807 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e02e508-a0b4-415f-a020-98d39c663483-config-data\") pod \"barbican-api-7c794d67d-m7bgs\" (UID: \"2e02e508-a0b4-415f-a020-98d39c663483\") " pod="openstack/barbican-api-7c794d67d-m7bgs"
Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.912548 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tm4qn\" (UniqueName: \"kubernetes.io/projected/2e02e508-a0b4-415f-a020-98d39c663483-kube-api-access-tm4qn\") pod \"barbican-api-7c794d67d-m7bgs\" (UID: \"2e02e508-a0b4-415f-a020-98d39c663483\") " pod="openstack/barbican-api-7c794d67d-m7bgs"
Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.912645 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2e02e508-a0b4-415f-a020-98d39c663483-logs\") pod \"barbican-api-7c794d67d-m7bgs\" (UID: \"2e02e508-a0b4-415f-a020-98d39c663483\") " pod="openstack/barbican-api-7c794d67d-m7bgs"
Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.918684 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2e02e508-a0b4-415f-a020-98d39c663483-logs\") pod \"barbican-api-7c794d67d-m7bgs\" (UID: \"2e02e508-a0b4-415f-a020-98d39c663483\") " pod="openstack/barbican-api-7c794d67d-m7bgs"
\"2e02e508-a0b4-415f-a020-98d39c663483\") " pod="openstack/barbican-api-7c794d67d-m7bgs" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.920770 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2e02e508-a0b4-415f-a020-98d39c663483-config-data-custom\") pod \"barbican-api-7c794d67d-m7bgs\" (UID: \"2e02e508-a0b4-415f-a020-98d39c663483\") " pod="openstack/barbican-api-7c794d67d-m7bgs" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.921000 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e02e508-a0b4-415f-a020-98d39c663483-config-data\") pod \"barbican-api-7c794d67d-m7bgs\" (UID: \"2e02e508-a0b4-415f-a020-98d39c663483\") " pod="openstack/barbican-api-7c794d67d-m7bgs" Feb 02 11:16:32 crc kubenswrapper[4838]: I0202 11:16:32.938838 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tm4qn\" (UniqueName: \"kubernetes.io/projected/2e02e508-a0b4-415f-a020-98d39c663483-kube-api-access-tm4qn\") pod \"barbican-api-7c794d67d-m7bgs\" (UID: \"2e02e508-a0b4-415f-a020-98d39c663483\") " pod="openstack/barbican-api-7c794d67d-m7bgs" Feb 02 11:16:33 crc kubenswrapper[4838]: I0202 11:16:33.211909 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7c794d67d-m7bgs" Feb 02 11:16:33 crc kubenswrapper[4838]: I0202 11:16:33.308443 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5545c69c4f-jpg4z"] Feb 02 11:16:33 crc kubenswrapper[4838]: W0202 11:16:33.319259 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podba0aafb6_d810_4b44_8e8a_eebc89abad94.slice/crio-d439c8a03d7236d8f927936df644db318d558589e7038610c20caadcf86e9ca5 WatchSource:0}: Error finding container d439c8a03d7236d8f927936df644db318d558589e7038610c20caadcf86e9ca5: Status 404 returned error can't find the container with id d439c8a03d7236d8f927936df644db318d558589e7038610c20caadcf86e9ca5 Feb 02 11:16:33 crc kubenswrapper[4838]: I0202 11:16:33.370916 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6c7fd57b7d-nvvkt"] Feb 02 11:16:33 crc kubenswrapper[4838]: I0202 11:16:33.545656 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-586bdc5f9-z48qj"] Feb 02 11:16:33 crc kubenswrapper[4838]: I0202 11:16:33.712858 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7c794d67d-m7bgs"] Feb 02 11:16:33 crc kubenswrapper[4838]: W0202 11:16:33.867945 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2e02e508_a0b4_415f_a020_98d39c663483.slice/crio-d42ff584a053002c4c80eec24ae954884fb2fdfe59baf4c27047a8a9572240e3 WatchSource:0}: Error finding container d42ff584a053002c4c80eec24ae954884fb2fdfe59baf4c27047a8a9572240e3: Status 404 returned error can't find the container with id d42ff584a053002c4c80eec24ae954884fb2fdfe59baf4c27047a8a9572240e3 Feb 02 11:16:34 crc kubenswrapper[4838]: I0202 11:16:34.096782 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c794d67d-m7bgs" event={"ID":"2e02e508-a0b4-415f-a020-98d39c663483","Type":"ContainerStarted","Data":"d42ff584a053002c4c80eec24ae954884fb2fdfe59baf4c27047a8a9572240e3"} Feb 02 11:16:34 crc kubenswrapper[4838]: I0202 
Feb 02 11:16:34 crc kubenswrapper[4838]: I0202 11:16:34.099344 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5545c69c4f-jpg4z" event={"ID":"ba0aafb6-d810-4b44-8e8a-eebc89abad94","Type":"ContainerStarted","Data":"d439c8a03d7236d8f927936df644db318d558589e7038610c20caadcf86e9ca5"}
Feb 02 11:16:34 crc kubenswrapper[4838]: I0202 11:16:34.100884 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586bdc5f9-z48qj" event={"ID":"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40","Type":"ContainerStarted","Data":"a5de07ad9dde56678bf71535ce8d3631424be7e19eb08c8a323649a2383f590c"}
Feb 02 11:16:34 crc kubenswrapper[4838]: I0202 11:16:34.102717 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6c7fd57b7d-nvvkt" event={"ID":"4001ac84-f8ab-436b-b526-23940d7f0463","Type":"ContainerStarted","Data":"10e9b527418e940616c1169892c057603d97bd537383ae725552c1f882c4b3f3"}
Feb 02 11:16:35 crc kubenswrapper[4838]: I0202 11:16:35.120050 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586bdc5f9-z48qj" event={"ID":"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40","Type":"ContainerStarted","Data":"9c6cc28d011a5de1d1e3cd3db4675ae585ba4453035f010600b5c6580fe47653"}
Feb 02 11:16:35 crc kubenswrapper[4838]: I0202 11:16:35.122332 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c794d67d-m7bgs" event={"ID":"2e02e508-a0b4-415f-a020-98d39c663483","Type":"ContainerStarted","Data":"b50c9e2d52d1edde38b5fb4e522cf93e8959beffa6a892de7628550268a1ce0b"}
Feb 02 11:16:36 crc kubenswrapper[4838]: I0202 11:16:36.134455 4838 generic.go:334] "Generic (PLEG): container finished" podID="0331ec29-4194-41b3-a014-bdf3f869aa17" containerID="0fa937d050468e6319818c17cdc68d0d981aa9f217aeee95a83c81ad5177854b" exitCode=0
Feb 02 11:16:36 crc kubenswrapper[4838]: I0202 11:16:36.134489 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0331ec29-4194-41b3-a014-bdf3f869aa17","Type":"ContainerDied","Data":"0fa937d050468e6319818c17cdc68d0d981aa9f217aeee95a83c81ad5177854b"}
Feb 02 11:16:36 crc kubenswrapper[4838]: I0202 11:16:36.138022 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c794d67d-m7bgs" event={"ID":"2e02e508-a0b4-415f-a020-98d39c663483","Type":"ContainerStarted","Data":"7d5bb5d9de96729f138eb26c32c40200d811eef9cd8af9f50521718a8019cf36"}
Feb 02 11:16:36 crc kubenswrapper[4838]: I0202 11:16:36.138537 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7c794d67d-m7bgs"
Feb 02 11:16:36 crc kubenswrapper[4838]: I0202 11:16:36.138801 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7c794d67d-m7bgs"
Feb 02 11:16:36 crc kubenswrapper[4838]: I0202 11:16:36.142397 4838 generic.go:334] "Generic (PLEG): container finished" podID="182e6c68-3df4-42a6-ba33-e5b7ffbe8f40" containerID="9c6cc28d011a5de1d1e3cd3db4675ae585ba4453035f010600b5c6580fe47653" exitCode=0
Feb 02 11:16:36 crc kubenswrapper[4838]: I0202 11:16:36.142433 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586bdc5f9-z48qj" event={"ID":"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40","Type":"ContainerDied","Data":"9c6cc28d011a5de1d1e3cd3db4675ae585ba4453035f010600b5c6580fe47653"}
podStartE2EDuration="4.164832829s" podCreationTimestamp="2026-02-02 11:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:16:36.159230991 +0000 UTC m=+1390.496332029" watchObservedRunningTime="2026-02-02 11:16:36.164832829 +0000 UTC m=+1390.501933867" Feb 02 11:16:36 crc kubenswrapper[4838]: I0202 11:16:36.328162 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 11:16:36 crc kubenswrapper[4838]: I0202 11:16:36.328680 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="039ec177-4229-482e-aeec-ec3db4349951" containerName="kube-state-metrics" containerID="cri-o://36042f2a57238fe93af81bcf7422de7a6317bd77934346fa9b0ad87a995f4266" gracePeriod=30 Feb 02 11:16:36 crc kubenswrapper[4838]: I0202 11:16:36.777541 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:16:36 crc kubenswrapper[4838]: I0202 11:16:36.894386 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0331ec29-4194-41b3-a014-bdf3f869aa17-log-httpd\") pod \"0331ec29-4194-41b3-a014-bdf3f869aa17\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " Feb 02 11:16:36 crc kubenswrapper[4838]: I0202 11:16:36.894525 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0331ec29-4194-41b3-a014-bdf3f869aa17-config-data\") pod \"0331ec29-4194-41b3-a014-bdf3f869aa17\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " Feb 02 11:16:36 crc kubenswrapper[4838]: I0202 11:16:36.894582 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0331ec29-4194-41b3-a014-bdf3f869aa17-scripts\") pod \"0331ec29-4194-41b3-a014-bdf3f869aa17\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " Feb 02 11:16:36 crc kubenswrapper[4838]: I0202 11:16:36.894732 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0331ec29-4194-41b3-a014-bdf3f869aa17-combined-ca-bundle\") pod \"0331ec29-4194-41b3-a014-bdf3f869aa17\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " Feb 02 11:16:36 crc kubenswrapper[4838]: I0202 11:16:36.894763 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0331ec29-4194-41b3-a014-bdf3f869aa17-run-httpd\") pod \"0331ec29-4194-41b3-a014-bdf3f869aa17\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " Feb 02 11:16:36 crc kubenswrapper[4838]: I0202 11:16:36.894794 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0331ec29-4194-41b3-a014-bdf3f869aa17-sg-core-conf-yaml\") pod \"0331ec29-4194-41b3-a014-bdf3f869aa17\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " Feb 02 11:16:36 crc kubenswrapper[4838]: I0202 11:16:36.895016 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2gd6\" (UniqueName: \"kubernetes.io/projected/0331ec29-4194-41b3-a014-bdf3f869aa17-kube-api-access-h2gd6\") pod \"0331ec29-4194-41b3-a014-bdf3f869aa17\" (UID: \"0331ec29-4194-41b3-a014-bdf3f869aa17\") " Feb 02 11:16:36 crc kubenswrapper[4838]: I0202 
Feb 02 11:16:36 crc kubenswrapper[4838]: I0202 11:16:36.896265 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0331ec29-4194-41b3-a014-bdf3f869aa17-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0331ec29-4194-41b3-a014-bdf3f869aa17" (UID: "0331ec29-4194-41b3-a014-bdf3f869aa17"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:16:36 crc kubenswrapper[4838]: I0202 11:16:36.896569 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0331ec29-4194-41b3-a014-bdf3f869aa17-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0331ec29-4194-41b3-a014-bdf3f869aa17" (UID: "0331ec29-4194-41b3-a014-bdf3f869aa17"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:16:36 crc kubenswrapper[4838]: I0202 11:16:36.901994 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0331ec29-4194-41b3-a014-bdf3f869aa17-scripts" (OuterVolumeSpecName: "scripts") pod "0331ec29-4194-41b3-a014-bdf3f869aa17" (UID: "0331ec29-4194-41b3-a014-bdf3f869aa17"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:16:36 crc kubenswrapper[4838]: I0202 11:16:36.907068 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0331ec29-4194-41b3-a014-bdf3f869aa17-kube-api-access-h2gd6" (OuterVolumeSpecName: "kube-api-access-h2gd6") pod "0331ec29-4194-41b3-a014-bdf3f869aa17" (UID: "0331ec29-4194-41b3-a014-bdf3f869aa17"). InnerVolumeSpecName "kube-api-access-h2gd6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:16:36 crc kubenswrapper[4838]: I0202 11:16:36.932369 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.000864 4838 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0331ec29-4194-41b3-a014-bdf3f869aa17-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.001111 4838 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0331ec29-4194-41b3-a014-bdf3f869aa17-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.001189 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2gd6\" (UniqueName: \"kubernetes.io/projected/0331ec29-4194-41b3-a014-bdf3f869aa17-kube-api-access-h2gd6\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.001263 4838 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0331ec29-4194-41b3-a014-bdf3f869aa17-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.001341 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0331ec29-4194-41b3-a014-bdf3f869aa17-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.013855 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0331ec29-4194-41b3-a014-bdf3f869aa17-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0331ec29-4194-41b3-a014-bdf3f869aa17" (UID: "0331ec29-4194-41b3-a014-bdf3f869aa17"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.026374 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0331ec29-4194-41b3-a014-bdf3f869aa17-config-data" (OuterVolumeSpecName: "config-data") pod "0331ec29-4194-41b3-a014-bdf3f869aa17" (UID: "0331ec29-4194-41b3-a014-bdf3f869aa17"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.102931 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5w7jg\" (UniqueName: \"kubernetes.io/projected/039ec177-4229-482e-aeec-ec3db4349951-kube-api-access-5w7jg\") pod \"039ec177-4229-482e-aeec-ec3db4349951\" (UID: \"039ec177-4229-482e-aeec-ec3db4349951\") " Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.103494 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0331ec29-4194-41b3-a014-bdf3f869aa17-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.103522 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0331ec29-4194-41b3-a014-bdf3f869aa17-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.106502 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/039ec177-4229-482e-aeec-ec3db4349951-kube-api-access-5w7jg" (OuterVolumeSpecName: "kube-api-access-5w7jg") pod "039ec177-4229-482e-aeec-ec3db4349951" (UID: "039ec177-4229-482e-aeec-ec3db4349951"). InnerVolumeSpecName "kube-api-access-5w7jg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.152957 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586bdc5f9-z48qj" event={"ID":"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40","Type":"ContainerStarted","Data":"97e346f34790565e1b0bc34bcc83af1a6aed322f148ac649e8f245aba9672915"} Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.153418 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-586bdc5f9-z48qj" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.155878 4838 generic.go:334] "Generic (PLEG): container finished" podID="039ec177-4229-482e-aeec-ec3db4349951" containerID="36042f2a57238fe93af81bcf7422de7a6317bd77934346fa9b0ad87a995f4266" exitCode=2 Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.156014 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"039ec177-4229-482e-aeec-ec3db4349951","Type":"ContainerDied","Data":"36042f2a57238fe93af81bcf7422de7a6317bd77934346fa9b0ad87a995f4266"} Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.156073 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"039ec177-4229-482e-aeec-ec3db4349951","Type":"ContainerDied","Data":"eb07b2c8863508ae875df9407933d8494ad49e5ca459ba7d09e2f977705d658e"} Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.156097 4838 scope.go:117] "RemoveContainer" containerID="36042f2a57238fe93af81bcf7422de7a6317bd77934346fa9b0ad87a995f4266" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.156318 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.163981 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.163984 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0331ec29-4194-41b3-a014-bdf3f869aa17","Type":"ContainerDied","Data":"27687f42c9604160214d7570dd0bafc5be4d54142a9485404fc3e2c17453cae3"} Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.182063 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9ztsj" event={"ID":"370d896a-37f9-4510-8800-4002a77c104d","Type":"ContainerStarted","Data":"c3aa15aa0daa6cd898bb97889a0838e9eefafd05b6e970db29db513eccdcbf64"} Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.189351 4838 scope.go:117] "RemoveContainer" containerID="36042f2a57238fe93af81bcf7422de7a6317bd77934346fa9b0ad87a995f4266" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.189522 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-586bdc5f9-z48qj" podStartSLOduration=5.189508348 podStartE2EDuration="5.189508348s" podCreationTimestamp="2026-02-02 11:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:16:37.182304218 +0000 UTC m=+1391.519405246" watchObservedRunningTime="2026-02-02 11:16:37.189508348 +0000 UTC m=+1391.526609386" Feb 02 11:16:37 crc kubenswrapper[4838]: E0202 11:16:37.189928 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36042f2a57238fe93af81bcf7422de7a6317bd77934346fa9b0ad87a995f4266\": container with ID starting with 36042f2a57238fe93af81bcf7422de7a6317bd77934346fa9b0ad87a995f4266 not found: ID does not exist" containerID="36042f2a57238fe93af81bcf7422de7a6317bd77934346fa9b0ad87a995f4266" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.189973 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36042f2a57238fe93af81bcf7422de7a6317bd77934346fa9b0ad87a995f4266"} err="failed to get container status \"36042f2a57238fe93af81bcf7422de7a6317bd77934346fa9b0ad87a995f4266\": rpc error: code = NotFound desc = could not find container \"36042f2a57238fe93af81bcf7422de7a6317bd77934346fa9b0ad87a995f4266\": container with ID starting with 36042f2a57238fe93af81bcf7422de7a6317bd77934346fa9b0ad87a995f4266 not found: ID does not exist" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.189998 4838 scope.go:117] "RemoveContainer" containerID="66a4723a5aced9c5cf310c77b8cebb6e8be7ea1ae636ff0df70156943ad70d07" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.205273 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5w7jg\" (UniqueName: \"kubernetes.io/projected/039ec177-4229-482e-aeec-ec3db4349951-kube-api-access-5w7jg\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.214120 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9ztsj" podStartSLOduration=10.701081832 podStartE2EDuration="17.214095817s" podCreationTimestamp="2026-02-02 11:16:20 +0000 UTC" firstStartedPulling="2026-02-02 11:16:30.008720562 +0000 UTC m=+1384.345821590" lastFinishedPulling="2026-02-02 11:16:36.521734547 +0000 UTC m=+1390.858835575" observedRunningTime="2026-02-02 11:16:37.205384587 +0000 UTC m=+1391.542485635" watchObservedRunningTime="2026-02-02 11:16:37.214095817 +0000 UTC 
m=+1391.551196845" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.231107 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.259261 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.315797 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.342306 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.354862 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 11:16:37 crc kubenswrapper[4838]: E0202 11:16:37.355375 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0331ec29-4194-41b3-a014-bdf3f869aa17" containerName="sg-core" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.355394 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="0331ec29-4194-41b3-a014-bdf3f869aa17" containerName="sg-core" Feb 02 11:16:37 crc kubenswrapper[4838]: E0202 11:16:37.355409 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0331ec29-4194-41b3-a014-bdf3f869aa17" containerName="proxy-httpd" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.355415 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="0331ec29-4194-41b3-a014-bdf3f869aa17" containerName="proxy-httpd" Feb 02 11:16:37 crc kubenswrapper[4838]: E0202 11:16:37.355428 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0331ec29-4194-41b3-a014-bdf3f869aa17" containerName="ceilometer-central-agent" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.355435 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="0331ec29-4194-41b3-a014-bdf3f869aa17" containerName="ceilometer-central-agent" Feb 02 11:16:37 crc kubenswrapper[4838]: E0202 11:16:37.355452 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="039ec177-4229-482e-aeec-ec3db4349951" containerName="kube-state-metrics" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.355458 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="039ec177-4229-482e-aeec-ec3db4349951" containerName="kube-state-metrics" Feb 02 11:16:37 crc kubenswrapper[4838]: E0202 11:16:37.355469 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0331ec29-4194-41b3-a014-bdf3f869aa17" containerName="ceilometer-notification-agent" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.355474 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="0331ec29-4194-41b3-a014-bdf3f869aa17" containerName="ceilometer-notification-agent" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.355657 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="039ec177-4229-482e-aeec-ec3db4349951" containerName="kube-state-metrics" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.355674 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="0331ec29-4194-41b3-a014-bdf3f869aa17" containerName="proxy-httpd" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.355690 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="0331ec29-4194-41b3-a014-bdf3f869aa17" containerName="ceilometer-notification-agent" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.355704 4838 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="0331ec29-4194-41b3-a014-bdf3f869aa17" containerName="sg-core" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.355713 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="0331ec29-4194-41b3-a014-bdf3f869aa17" containerName="ceilometer-central-agent" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.356359 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.358352 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.358533 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.359134 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-kc74s" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.374364 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.388991 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.392019 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.395346 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.395653 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.405978 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.514654 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-config-data\") pod \"ceilometer-0\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " pod="openstack/ceilometer-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.514721 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/253cbeb6-ec5a-404a-904c-d06b377ed987-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"253cbeb6-ec5a-404a-904c-d06b377ed987\") " pod="openstack/kube-state-metrics-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.514759 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/253cbeb6-ec5a-404a-904c-d06b377ed987-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"253cbeb6-ec5a-404a-904c-d06b377ed987\") " pod="openstack/kube-state-metrics-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.514780 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-log-httpd\") pod \"ceilometer-0\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " pod="openstack/ceilometer-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.514829 4838 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/253cbeb6-ec5a-404a-904c-d06b377ed987-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"253cbeb6-ec5a-404a-904c-d06b377ed987\") " pod="openstack/kube-state-metrics-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.514857 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-run-httpd\") pod \"ceilometer-0\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " pod="openstack/ceilometer-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.514883 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " pod="openstack/ceilometer-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.514946 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-scripts\") pod \"ceilometer-0\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " pod="openstack/ceilometer-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.514990 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsnkb\" (UniqueName: \"kubernetes.io/projected/253cbeb6-ec5a-404a-904c-d06b377ed987-kube-api-access-xsnkb\") pod \"kube-state-metrics-0\" (UID: \"253cbeb6-ec5a-404a-904c-d06b377ed987\") " pod="openstack/kube-state-metrics-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.515025 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwvmn\" (UniqueName: \"kubernetes.io/projected/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-kube-api-access-xwvmn\") pod \"ceilometer-0\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " pod="openstack/ceilometer-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.515048 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " pod="openstack/ceilometer-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.551841 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-859b6c8866-ltwgg"] Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.556742 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.558973 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.558989 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.566958 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-859b6c8866-ltwgg"] Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.616516 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/253cbeb6-ec5a-404a-904c-d06b377ed987-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"253cbeb6-ec5a-404a-904c-d06b377ed987\") " pod="openstack/kube-state-metrics-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.616571 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-log-httpd\") pod \"ceilometer-0\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " pod="openstack/ceilometer-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.616669 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/253cbeb6-ec5a-404a-904c-d06b377ed987-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"253cbeb6-ec5a-404a-904c-d06b377ed987\") " pod="openstack/kube-state-metrics-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.616719 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-run-httpd\") pod \"ceilometer-0\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " pod="openstack/ceilometer-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.616738 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " pod="openstack/ceilometer-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.616812 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-scripts\") pod \"ceilometer-0\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " pod="openstack/ceilometer-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.616875 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsnkb\" (UniqueName: \"kubernetes.io/projected/253cbeb6-ec5a-404a-904c-d06b377ed987-kube-api-access-xsnkb\") pod \"kube-state-metrics-0\" (UID: \"253cbeb6-ec5a-404a-904c-d06b377ed987\") " pod="openstack/kube-state-metrics-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.616918 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwvmn\" (UniqueName: \"kubernetes.io/projected/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-kube-api-access-xwvmn\") pod \"ceilometer-0\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " pod="openstack/ceilometer-0" Feb 02 11:16:37 crc 
kubenswrapper[4838]: I0202 11:16:37.616949 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " pod="openstack/ceilometer-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.616974 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-config-data\") pod \"ceilometer-0\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " pod="openstack/ceilometer-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.616996 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/253cbeb6-ec5a-404a-904c-d06b377ed987-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"253cbeb6-ec5a-404a-904c-d06b377ed987\") " pod="openstack/kube-state-metrics-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.618904 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-log-httpd\") pod \"ceilometer-0\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " pod="openstack/ceilometer-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.623779 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-run-httpd\") pod \"ceilometer-0\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " pod="openstack/ceilometer-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.624802 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/253cbeb6-ec5a-404a-904c-d06b377ed987-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"253cbeb6-ec5a-404a-904c-d06b377ed987\") " pod="openstack/kube-state-metrics-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.626161 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-config-data\") pod \"ceilometer-0\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " pod="openstack/ceilometer-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.628705 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/253cbeb6-ec5a-404a-904c-d06b377ed987-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"253cbeb6-ec5a-404a-904c-d06b377ed987\") " pod="openstack/kube-state-metrics-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.630019 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/253cbeb6-ec5a-404a-904c-d06b377ed987-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"253cbeb6-ec5a-404a-904c-d06b377ed987\") " pod="openstack/kube-state-metrics-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.630187 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-combined-ca-bundle\") pod \"ceilometer-0\" (UID: 
\"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " pod="openstack/ceilometer-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.630584 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-scripts\") pod \"ceilometer-0\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " pod="openstack/ceilometer-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.636372 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " pod="openstack/ceilometer-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.646559 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xsnkb\" (UniqueName: \"kubernetes.io/projected/253cbeb6-ec5a-404a-904c-d06b377ed987-kube-api-access-xsnkb\") pod \"kube-state-metrics-0\" (UID: \"253cbeb6-ec5a-404a-904c-d06b377ed987\") " pod="openstack/kube-state-metrics-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.664547 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwvmn\" (UniqueName: \"kubernetes.io/projected/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-kube-api-access-xwvmn\") pod \"ceilometer-0\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " pod="openstack/ceilometer-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.679729 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.718526 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a08effe2-908e-4666-8f0d-2348c91376cf-logs\") pod \"barbican-api-859b6c8866-ltwgg\" (UID: \"a08effe2-908e-4666-8f0d-2348c91376cf\") " pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.718688 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a08effe2-908e-4666-8f0d-2348c91376cf-combined-ca-bundle\") pod \"barbican-api-859b6c8866-ltwgg\" (UID: \"a08effe2-908e-4666-8f0d-2348c91376cf\") " pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.718761 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a08effe2-908e-4666-8f0d-2348c91376cf-internal-tls-certs\") pod \"barbican-api-859b6c8866-ltwgg\" (UID: \"a08effe2-908e-4666-8f0d-2348c91376cf\") " pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.718888 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a08effe2-908e-4666-8f0d-2348c91376cf-public-tls-certs\") pod \"barbican-api-859b6c8866-ltwgg\" (UID: \"a08effe2-908e-4666-8f0d-2348c91376cf\") " pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.718951 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rz72p\" (UniqueName: 
\"kubernetes.io/projected/a08effe2-908e-4666-8f0d-2348c91376cf-kube-api-access-rz72p\") pod \"barbican-api-859b6c8866-ltwgg\" (UID: \"a08effe2-908e-4666-8f0d-2348c91376cf\") " pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.718978 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a08effe2-908e-4666-8f0d-2348c91376cf-config-data-custom\") pod \"barbican-api-859b6c8866-ltwgg\" (UID: \"a08effe2-908e-4666-8f0d-2348c91376cf\") " pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.718996 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a08effe2-908e-4666-8f0d-2348c91376cf-config-data\") pod \"barbican-api-859b6c8866-ltwgg\" (UID: \"a08effe2-908e-4666-8f0d-2348c91376cf\") " pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.719595 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.820317 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a08effe2-908e-4666-8f0d-2348c91376cf-combined-ca-bundle\") pod \"barbican-api-859b6c8866-ltwgg\" (UID: \"a08effe2-908e-4666-8f0d-2348c91376cf\") " pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.820416 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a08effe2-908e-4666-8f0d-2348c91376cf-internal-tls-certs\") pod \"barbican-api-859b6c8866-ltwgg\" (UID: \"a08effe2-908e-4666-8f0d-2348c91376cf\") " pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.820525 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a08effe2-908e-4666-8f0d-2348c91376cf-public-tls-certs\") pod \"barbican-api-859b6c8866-ltwgg\" (UID: \"a08effe2-908e-4666-8f0d-2348c91376cf\") " pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.820581 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rz72p\" (UniqueName: \"kubernetes.io/projected/a08effe2-908e-4666-8f0d-2348c91376cf-kube-api-access-rz72p\") pod \"barbican-api-859b6c8866-ltwgg\" (UID: \"a08effe2-908e-4666-8f0d-2348c91376cf\") " pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.820631 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a08effe2-908e-4666-8f0d-2348c91376cf-config-data-custom\") pod \"barbican-api-859b6c8866-ltwgg\" (UID: \"a08effe2-908e-4666-8f0d-2348c91376cf\") " pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.820655 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a08effe2-908e-4666-8f0d-2348c91376cf-config-data\") pod \"barbican-api-859b6c8866-ltwgg\" (UID: \"a08effe2-908e-4666-8f0d-2348c91376cf\") " 
pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.820690 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a08effe2-908e-4666-8f0d-2348c91376cf-logs\") pod \"barbican-api-859b6c8866-ltwgg\" (UID: \"a08effe2-908e-4666-8f0d-2348c91376cf\") " pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.821196 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a08effe2-908e-4666-8f0d-2348c91376cf-logs\") pod \"barbican-api-859b6c8866-ltwgg\" (UID: \"a08effe2-908e-4666-8f0d-2348c91376cf\") " pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.826722 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a08effe2-908e-4666-8f0d-2348c91376cf-public-tls-certs\") pod \"barbican-api-859b6c8866-ltwgg\" (UID: \"a08effe2-908e-4666-8f0d-2348c91376cf\") " pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.826761 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a08effe2-908e-4666-8f0d-2348c91376cf-config-data-custom\") pod \"barbican-api-859b6c8866-ltwgg\" (UID: \"a08effe2-908e-4666-8f0d-2348c91376cf\") " pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.827305 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a08effe2-908e-4666-8f0d-2348c91376cf-config-data\") pod \"barbican-api-859b6c8866-ltwgg\" (UID: \"a08effe2-908e-4666-8f0d-2348c91376cf\") " pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.827418 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a08effe2-908e-4666-8f0d-2348c91376cf-internal-tls-certs\") pod \"barbican-api-859b6c8866-ltwgg\" (UID: \"a08effe2-908e-4666-8f0d-2348c91376cf\") " pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.827867 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a08effe2-908e-4666-8f0d-2348c91376cf-combined-ca-bundle\") pod \"barbican-api-859b6c8866-ltwgg\" (UID: \"a08effe2-908e-4666-8f0d-2348c91376cf\") " pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.844442 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rz72p\" (UniqueName: \"kubernetes.io/projected/a08effe2-908e-4666-8f0d-2348c91376cf-kube-api-access-rz72p\") pod \"barbican-api-859b6c8866-ltwgg\" (UID: \"a08effe2-908e-4666-8f0d-2348c91376cf\") " pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:37 crc kubenswrapper[4838]: I0202 11:16:37.885743 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:38 crc kubenswrapper[4838]: I0202 11:16:38.516360 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0331ec29-4194-41b3-a014-bdf3f869aa17" path="/var/lib/kubelet/pods/0331ec29-4194-41b3-a014-bdf3f869aa17/volumes" Feb 02 11:16:38 crc kubenswrapper[4838]: I0202 11:16:38.517238 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="039ec177-4229-482e-aeec-ec3db4349951" path="/var/lib/kubelet/pods/039ec177-4229-482e-aeec-ec3db4349951/volumes" Feb 02 11:16:38 crc kubenswrapper[4838]: I0202 11:16:38.598900 4838 scope.go:117] "RemoveContainer" containerID="97fcfb2ca66f65eb91f90a8d8a986c7d26cc28a9624ff181d601bd2fc2014830" Feb 02 11:16:38 crc kubenswrapper[4838]: I0202 11:16:38.713022 4838 scope.go:117] "RemoveContainer" containerID="0fa937d050468e6319818c17cdc68d0d981aa9f217aeee95a83c81ad5177854b" Feb 02 11:16:38 crc kubenswrapper[4838]: I0202 11:16:38.939145 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:16:39 crc kubenswrapper[4838]: I0202 11:16:39.134208 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-859b6c8866-ltwgg"] Feb 02 11:16:39 crc kubenswrapper[4838]: I0202 11:16:39.207850 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 11:16:39 crc kubenswrapper[4838]: W0202 11:16:39.285191 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod253cbeb6_ec5a_404a_904c_d06b377ed987.slice/crio-3f31df0d326a064373d6ec76240edb2c2059d7ed87f49f5a2975014d08e19c43 WatchSource:0}: Error finding container 3f31df0d326a064373d6ec76240edb2c2059d7ed87f49f5a2975014d08e19c43: Status 404 returned error can't find the container with id 3f31df0d326a064373d6ec76240edb2c2059d7ed87f49f5a2975014d08e19c43 Feb 02 11:16:39 crc kubenswrapper[4838]: I0202 11:16:39.358884 4838 scope.go:117] "RemoveContainer" containerID="8098bc11265af193d59f114420487861af4105435ead9ff5ace406b4a1a0c04d" Feb 02 11:16:39 crc kubenswrapper[4838]: W0202 11:16:39.767562 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc101b37f_f6a8_49ff_a03c_cbae4a2f10ab.slice/crio-38a5f4d3134efeefaa9746f0248a12774156346474f9bab372bbf53a7cf533a1 WatchSource:0}: Error finding container 38a5f4d3134efeefaa9746f0248a12774156346474f9bab372bbf53a7cf533a1: Status 404 returned error can't find the container with id 38a5f4d3134efeefaa9746f0248a12774156346474f9bab372bbf53a7cf533a1 Feb 02 11:16:39 crc kubenswrapper[4838]: I0202 11:16:39.775793 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:16:40 crc kubenswrapper[4838]: I0202 11:16:40.215280 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab","Type":"ContainerStarted","Data":"38a5f4d3134efeefaa9746f0248a12774156346474f9bab372bbf53a7cf533a1"} Feb 02 11:16:40 crc kubenswrapper[4838]: I0202 11:16:40.218086 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"253cbeb6-ec5a-404a-904c-d06b377ed987","Type":"ContainerStarted","Data":"3f31df0d326a064373d6ec76240edb2c2059d7ed87f49f5a2975014d08e19c43"} Feb 02 11:16:40 crc kubenswrapper[4838]: I0202 11:16:40.219738 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/barbican-api-859b6c8866-ltwgg" event={"ID":"a08effe2-908e-4666-8f0d-2348c91376cf","Type":"ContainerStarted","Data":"2394e68ab1ee33ad5340757f2549092a71ee2434677dd7abcbb664421fe686a9"} Feb 02 11:16:41 crc kubenswrapper[4838]: I0202 11:16:41.224039 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9ztsj" Feb 02 11:16:41 crc kubenswrapper[4838]: I0202 11:16:41.224084 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9ztsj" Feb 02 11:16:41 crc kubenswrapper[4838]: I0202 11:16:41.231087 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-859b6c8866-ltwgg" event={"ID":"a08effe2-908e-4666-8f0d-2348c91376cf","Type":"ContainerStarted","Data":"571c99c0ad70cef66d5c24a15c4e1b230d52c72ba988b2068e6a89151f3fe73c"} Feb 02 11:16:42 crc kubenswrapper[4838]: I0202 11:16:42.241936 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5545c69c4f-jpg4z" event={"ID":"ba0aafb6-d810-4b44-8e8a-eebc89abad94","Type":"ContainerStarted","Data":"14bbb6e5f1668b976cf5ffc5c51be4f8674e0c35dccb161b7df723e8bb3ea754"} Feb 02 11:16:42 crc kubenswrapper[4838]: I0202 11:16:42.243643 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6c7fd57b7d-nvvkt" event={"ID":"4001ac84-f8ab-436b-b526-23940d7f0463","Type":"ContainerStarted","Data":"642ee134ae3de0297a62c663149c86dc6e14dac4adbf2e8d6e007905b2d2f45c"} Feb 02 11:16:42 crc kubenswrapper[4838]: I0202 11:16:42.274165 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-9ztsj" podUID="370d896a-37f9-4510-8800-4002a77c104d" containerName="registry-server" probeResult="failure" output=< Feb 02 11:16:42 crc kubenswrapper[4838]: timeout: failed to connect service ":50051" within 1s Feb 02 11:16:42 crc kubenswrapper[4838]: > Feb 02 11:16:42 crc kubenswrapper[4838]: I0202 11:16:42.892845 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-586bdc5f9-z48qj" Feb 02 11:16:42 crc kubenswrapper[4838]: I0202 11:16:42.991532 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-pkmk8"] Feb 02 11:16:42 crc kubenswrapper[4838]: I0202 11:16:42.991869 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" podUID="a0532d7b-d4a7-4021-9a42-8329a2ef50fc" containerName="dnsmasq-dns" containerID="cri-o://467e1f30e3e3bcb78002cccefd61abf3f2f4e06c3c4cf19e0fcb8f1f1aa09bf9" gracePeriod=10 Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.156482 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-7c794d67d-m7bgs" podUID="2e02e508-a0b4-415f-a020-98d39c663483" containerName="barbican-api" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.318235 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-859b6c8866-ltwgg" event={"ID":"a08effe2-908e-4666-8f0d-2348c91376cf","Type":"ContainerStarted","Data":"19e8bd2f24a032c1d334f689df2cead208c5997418f49d433c0566a1407d2bea"} Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.319490 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.319525 4838 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.339957 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5545c69c4f-jpg4z" event={"ID":"ba0aafb6-d810-4b44-8e8a-eebc89abad94","Type":"ContainerStarted","Data":"69b8750f717c8b31f05676dcca7bb9f82c3643d3c022fbe4f20b16c100df3113"} Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.357986 4838 generic.go:334] "Generic (PLEG): container finished" podID="a0532d7b-d4a7-4021-9a42-8329a2ef50fc" containerID="467e1f30e3e3bcb78002cccefd61abf3f2f4e06c3c4cf19e0fcb8f1f1aa09bf9" exitCode=0 Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.358057 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" event={"ID":"a0532d7b-d4a7-4021-9a42-8329a2ef50fc","Type":"ContainerDied","Data":"467e1f30e3e3bcb78002cccefd61abf3f2f4e06c3c4cf19e0fcb8f1f1aa09bf9"} Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.368741 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6c7fd57b7d-nvvkt" event={"ID":"4001ac84-f8ab-436b-b526-23940d7f0463","Type":"ContainerStarted","Data":"e2901dc7c668698eed8bddec9a3aa25c9fe19d2752de4eb8b62f56799a93f61d"} Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.376725 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-859b6c8866-ltwgg" podStartSLOduration=7.376686683 podStartE2EDuration="7.376686683s" podCreationTimestamp="2026-02-02 11:16:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:16:44.348058027 +0000 UTC m=+1398.685159075" watchObservedRunningTime="2026-02-02 11:16:44.376686683 +0000 UTC m=+1398.713787711" Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.428062 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-5545c69c4f-jpg4z" podStartSLOduration=7.053865107 podStartE2EDuration="12.427919645s" podCreationTimestamp="2026-02-02 11:16:32 +0000 UTC" firstStartedPulling="2026-02-02 11:16:33.321819283 +0000 UTC m=+1387.658920311" lastFinishedPulling="2026-02-02 11:16:38.695873811 +0000 UTC m=+1393.032974849" observedRunningTime="2026-02-02 11:16:44.367022748 +0000 UTC m=+1398.704123786" watchObservedRunningTime="2026-02-02 11:16:44.427919645 +0000 UTC m=+1398.765020673" Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.433824 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-6c7fd57b7d-nvvkt" podStartSLOduration=6.454977362 podStartE2EDuration="12.43380093s" podCreationTimestamp="2026-02-02 11:16:32 +0000 UTC" firstStartedPulling="2026-02-02 11:16:33.380906052 +0000 UTC m=+1387.718007080" lastFinishedPulling="2026-02-02 11:16:39.35972962 +0000 UTC m=+1393.696830648" observedRunningTime="2026-02-02 11:16:44.396175687 +0000 UTC m=+1398.733276735" watchObservedRunningTime="2026-02-02 11:16:44.43380093 +0000 UTC m=+1398.770901978" Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.553225 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.615645 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-ovsdbserver-nb\") pod \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\" (UID: \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\") " Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.615756 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-config\") pod \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\" (UID: \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\") " Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.615807 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-dns-svc\") pod \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\" (UID: \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\") " Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.615839 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-ovsdbserver-sb\") pod \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\" (UID: \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\") " Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.615903 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q88gq\" (UniqueName: \"kubernetes.io/projected/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-kube-api-access-q88gq\") pod \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\" (UID: \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\") " Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.615997 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-dns-swift-storage-0\") pod \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\" (UID: \"a0532d7b-d4a7-4021-9a42-8329a2ef50fc\") " Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.627913 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-kube-api-access-q88gq" (OuterVolumeSpecName: "kube-api-access-q88gq") pod "a0532d7b-d4a7-4021-9a42-8329a2ef50fc" (UID: "a0532d7b-d4a7-4021-9a42-8329a2ef50fc"). InnerVolumeSpecName "kube-api-access-q88gq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.706058 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-config" (OuterVolumeSpecName: "config") pod "a0532d7b-d4a7-4021-9a42-8329a2ef50fc" (UID: "a0532d7b-d4a7-4021-9a42-8329a2ef50fc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.706800 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a0532d7b-d4a7-4021-9a42-8329a2ef50fc" (UID: "a0532d7b-d4a7-4021-9a42-8329a2ef50fc"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.719146 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-config\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.719179 4838 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.719192 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q88gq\" (UniqueName: \"kubernetes.io/projected/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-kube-api-access-q88gq\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.719472 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "a0532d7b-d4a7-4021-9a42-8329a2ef50fc" (UID: "a0532d7b-d4a7-4021-9a42-8329a2ef50fc"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.721244 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a0532d7b-d4a7-4021-9a42-8329a2ef50fc" (UID: "a0532d7b-d4a7-4021-9a42-8329a2ef50fc"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.812525 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a0532d7b-d4a7-4021-9a42-8329a2ef50fc" (UID: "a0532d7b-d4a7-4021-9a42-8329a2ef50fc"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.821256 4838 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.821299 4838 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:44 crc kubenswrapper[4838]: I0202 11:16:44.821313 4838 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a0532d7b-d4a7-4021-9a42-8329a2ef50fc-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:45 crc kubenswrapper[4838]: I0202 11:16:45.117945 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7c794d67d-m7bgs" Feb 02 11:16:45 crc kubenswrapper[4838]: I0202 11:16:45.237357 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7c794d67d-m7bgs" Feb 02 11:16:45 crc kubenswrapper[4838]: I0202 11:16:45.392106 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"253cbeb6-ec5a-404a-904c-d06b377ed987","Type":"ContainerStarted","Data":"4bb5644df3c07652e83ae67e47decdadbbf283740d52eb700f42820cfcf5bcff"} Feb 02 11:16:45 crc kubenswrapper[4838]: I0202 11:16:45.392578 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Feb 02 11:16:45 crc kubenswrapper[4838]: I0202 11:16:45.396716 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" event={"ID":"a0532d7b-d4a7-4021-9a42-8329a2ef50fc","Type":"ContainerDied","Data":"7739a2d6da279fe8145d94ef100cfb1fd055d0bf5462e7a9f8f6a32f6d36d8aa"} Feb 02 11:16:45 crc kubenswrapper[4838]: I0202 11:16:45.396782 4838 scope.go:117] "RemoveContainer" containerID="467e1f30e3e3bcb78002cccefd61abf3f2f4e06c3c4cf19e0fcb8f1f1aa09bf9" Feb 02 11:16:45 crc kubenswrapper[4838]: I0202 11:16:45.396853 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-pkmk8" Feb 02 11:16:45 crc kubenswrapper[4838]: I0202 11:16:45.414465 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=3.6240047239999997 podStartE2EDuration="8.414438109s" podCreationTimestamp="2026-02-02 11:16:37 +0000 UTC" firstStartedPulling="2026-02-02 11:16:39.306564117 +0000 UTC m=+1393.643665145" lastFinishedPulling="2026-02-02 11:16:44.096997502 +0000 UTC m=+1398.434098530" observedRunningTime="2026-02-02 11:16:45.413250287 +0000 UTC m=+1399.750351315" watchObservedRunningTime="2026-02-02 11:16:45.414438109 +0000 UTC m=+1399.751539137" Feb 02 11:16:45 crc kubenswrapper[4838]: I0202 11:16:45.430539 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 11:16:45 crc kubenswrapper[4838]: I0202 11:16:45.430678 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 11:16:45 crc kubenswrapper[4838]: I0202 11:16:45.523311 4838 scope.go:117] "RemoveContainer" containerID="2d44871bae04b179f8b9dabe190e8f8b63301ce7c00528361ff0c3d242f2b1ff" Feb 02 11:16:45 crc kubenswrapper[4838]: I0202 11:16:45.543849 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-pkmk8"] Feb 02 11:16:45 crc kubenswrapper[4838]: I0202 11:16:45.561738 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-pkmk8"] Feb 02 11:16:46 crc kubenswrapper[4838]: I0202 11:16:46.424906 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab","Type":"ContainerStarted","Data":"3aa40cca9cad6f6a017b9fb737503f4af87c7536e2225f190888d3cd5ccd401c"} Feb 02 11:16:46 crc kubenswrapper[4838]: I0202 11:16:46.425233 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab","Type":"ContainerStarted","Data":"9b2a1490f6bca7b17103009d68c7f64c3848c5a87dc6c3b53fc348adea9d1c57"} Feb 02 11:16:46 crc kubenswrapper[4838]: I0202 11:16:46.523472 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0532d7b-d4a7-4021-9a42-8329a2ef50fc" path="/var/lib/kubelet/pods/a0532d7b-d4a7-4021-9a42-8329a2ef50fc/volumes" Feb 02 11:16:47 crc kubenswrapper[4838]: I0202 11:16:47.699930 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:48 crc kubenswrapper[4838]: I0202 11:16:48.446864 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab","Type":"ContainerStarted","Data":"7acddb017db27a800569fca268a18277ca4a06550fc6bb589be69bbb2cff9ae7"} Feb 02 11:16:51 crc kubenswrapper[4838]: I0202 11:16:51.277437 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9ztsj" Feb 02 11:16:51 crc kubenswrapper[4838]: I0202 11:16:51.330004 4838 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9ztsj" Feb 02 11:16:51 crc kubenswrapper[4838]: I0202 11:16:51.473564 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab","Type":"ContainerStarted","Data":"cf3c52eda25dcac184dc2bca2e37355ec8b213ddc8f0951561bdcb1eaa23ecca"} Feb 02 11:16:52 crc kubenswrapper[4838]: I0202 11:16:52.089169 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9ztsj"] Feb 02 11:16:52 crc kubenswrapper[4838]: I0202 11:16:52.484378 4838 generic.go:334] "Generic (PLEG): container finished" podID="c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" containerID="cf3c52eda25dcac184dc2bca2e37355ec8b213ddc8f0951561bdcb1eaa23ecca" exitCode=1 Feb 02 11:16:52 crc kubenswrapper[4838]: I0202 11:16:52.484547 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" containerName="ceilometer-central-agent" containerID="cri-o://9b2a1490f6bca7b17103009d68c7f64c3848c5a87dc6c3b53fc348adea9d1c57" gracePeriod=30 Feb 02 11:16:52 crc kubenswrapper[4838]: I0202 11:16:52.484604 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab","Type":"ContainerDied","Data":"cf3c52eda25dcac184dc2bca2e37355ec8b213ddc8f0951561bdcb1eaa23ecca"} Feb 02 11:16:52 crc kubenswrapper[4838]: I0202 11:16:52.484748 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" containerName="sg-core" containerID="cri-o://7acddb017db27a800569fca268a18277ca4a06550fc6bb589be69bbb2cff9ae7" gracePeriod=30 Feb 02 11:16:52 crc kubenswrapper[4838]: I0202 11:16:52.484817 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" containerName="ceilometer-notification-agent" containerID="cri-o://3aa40cca9cad6f6a017b9fb737503f4af87c7536e2225f190888d3cd5ccd401c" gracePeriod=30 Feb 02 11:16:52 crc kubenswrapper[4838]: I0202 11:16:52.486331 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9ztsj" podUID="370d896a-37f9-4510-8800-4002a77c104d" containerName="registry-server" containerID="cri-o://c3aa15aa0daa6cd898bb97889a0838e9eefafd05b6e970db29db513eccdcbf64" gracePeriod=2 Feb 02 11:16:52 crc kubenswrapper[4838]: I0202 11:16:52.895706 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-859b6c8866-ltwgg" Feb 02 11:16:52 crc kubenswrapper[4838]: I0202 11:16:52.981355 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7c794d67d-m7bgs"] Feb 02 11:16:52 crc kubenswrapper[4838]: I0202 11:16:52.981678 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7c794d67d-m7bgs" podUID="2e02e508-a0b4-415f-a020-98d39c663483" containerName="barbican-api-log" containerID="cri-o://b50c9e2d52d1edde38b5fb4e522cf93e8959beffa6a892de7628550268a1ce0b" gracePeriod=30 Feb 02 11:16:52 crc kubenswrapper[4838]: I0202 11:16:52.981808 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7c794d67d-m7bgs" podUID="2e02e508-a0b4-415f-a020-98d39c663483" containerName="barbican-api" 
containerID="cri-o://7d5bb5d9de96729f138eb26c32c40200d811eef9cd8af9f50521718a8019cf36" gracePeriod=30 Feb 02 11:16:53 crc kubenswrapper[4838]: I0202 11:16:53.496428 4838 generic.go:334] "Generic (PLEG): container finished" podID="370d896a-37f9-4510-8800-4002a77c104d" containerID="c3aa15aa0daa6cd898bb97889a0838e9eefafd05b6e970db29db513eccdcbf64" exitCode=0 Feb 02 11:16:53 crc kubenswrapper[4838]: I0202 11:16:53.496815 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9ztsj" event={"ID":"370d896a-37f9-4510-8800-4002a77c104d","Type":"ContainerDied","Data":"c3aa15aa0daa6cd898bb97889a0838e9eefafd05b6e970db29db513eccdcbf64"} Feb 02 11:16:53 crc kubenswrapper[4838]: I0202 11:16:53.496844 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9ztsj" event={"ID":"370d896a-37f9-4510-8800-4002a77c104d","Type":"ContainerDied","Data":"544c32ece08fbf763d311161c39c7eaf3d997303f73339f41334018988e8b5ce"} Feb 02 11:16:53 crc kubenswrapper[4838]: I0202 11:16:53.496856 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="544c32ece08fbf763d311161c39c7eaf3d997303f73339f41334018988e8b5ce" Feb 02 11:16:53 crc kubenswrapper[4838]: I0202 11:16:53.500286 4838 generic.go:334] "Generic (PLEG): container finished" podID="2e02e508-a0b4-415f-a020-98d39c663483" containerID="b50c9e2d52d1edde38b5fb4e522cf93e8959beffa6a892de7628550268a1ce0b" exitCode=143 Feb 02 11:16:53 crc kubenswrapper[4838]: I0202 11:16:53.500373 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c794d67d-m7bgs" event={"ID":"2e02e508-a0b4-415f-a020-98d39c663483","Type":"ContainerDied","Data":"b50c9e2d52d1edde38b5fb4e522cf93e8959beffa6a892de7628550268a1ce0b"} Feb 02 11:16:53 crc kubenswrapper[4838]: I0202 11:16:53.503576 4838 generic.go:334] "Generic (PLEG): container finished" podID="c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" containerID="7acddb017db27a800569fca268a18277ca4a06550fc6bb589be69bbb2cff9ae7" exitCode=2 Feb 02 11:16:53 crc kubenswrapper[4838]: I0202 11:16:53.503601 4838 generic.go:334] "Generic (PLEG): container finished" podID="c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" containerID="3aa40cca9cad6f6a017b9fb737503f4af87c7536e2225f190888d3cd5ccd401c" exitCode=0 Feb 02 11:16:53 crc kubenswrapper[4838]: I0202 11:16:53.503636 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab","Type":"ContainerDied","Data":"7acddb017db27a800569fca268a18277ca4a06550fc6bb589be69bbb2cff9ae7"} Feb 02 11:16:53 crc kubenswrapper[4838]: I0202 11:16:53.503662 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab","Type":"ContainerDied","Data":"3aa40cca9cad6f6a017b9fb737503f4af87c7536e2225f190888d3cd5ccd401c"} Feb 02 11:16:53 crc kubenswrapper[4838]: I0202 11:16:53.505452 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9ztsj" Feb 02 11:16:53 crc kubenswrapper[4838]: I0202 11:16:53.697220 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/370d896a-37f9-4510-8800-4002a77c104d-catalog-content\") pod \"370d896a-37f9-4510-8800-4002a77c104d\" (UID: \"370d896a-37f9-4510-8800-4002a77c104d\") " Feb 02 11:16:53 crc kubenswrapper[4838]: I0202 11:16:53.697383 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/370d896a-37f9-4510-8800-4002a77c104d-utilities\") pod \"370d896a-37f9-4510-8800-4002a77c104d\" (UID: \"370d896a-37f9-4510-8800-4002a77c104d\") " Feb 02 11:16:53 crc kubenswrapper[4838]: I0202 11:16:53.697619 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qw57c\" (UniqueName: \"kubernetes.io/projected/370d896a-37f9-4510-8800-4002a77c104d-kube-api-access-qw57c\") pod \"370d896a-37f9-4510-8800-4002a77c104d\" (UID: \"370d896a-37f9-4510-8800-4002a77c104d\") " Feb 02 11:16:53 crc kubenswrapper[4838]: I0202 11:16:53.698692 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/370d896a-37f9-4510-8800-4002a77c104d-utilities" (OuterVolumeSpecName: "utilities") pod "370d896a-37f9-4510-8800-4002a77c104d" (UID: "370d896a-37f9-4510-8800-4002a77c104d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:16:53 crc kubenswrapper[4838]: I0202 11:16:53.704857 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/370d896a-37f9-4510-8800-4002a77c104d-kube-api-access-qw57c" (OuterVolumeSpecName: "kube-api-access-qw57c") pod "370d896a-37f9-4510-8800-4002a77c104d" (UID: "370d896a-37f9-4510-8800-4002a77c104d"). InnerVolumeSpecName "kube-api-access-qw57c". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:16:53 crc kubenswrapper[4838]: I0202 11:16:53.797470 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/370d896a-37f9-4510-8800-4002a77c104d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "370d896a-37f9-4510-8800-4002a77c104d" (UID: "370d896a-37f9-4510-8800-4002a77c104d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:16:53 crc kubenswrapper[4838]: I0202 11:16:53.799751 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qw57c\" (UniqueName: \"kubernetes.io/projected/370d896a-37f9-4510-8800-4002a77c104d-kube-api-access-qw57c\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:53 crc kubenswrapper[4838]: I0202 11:16:53.799807 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/370d896a-37f9-4510-8800-4002a77c104d-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:53 crc kubenswrapper[4838]: I0202 11:16:53.799825 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/370d896a-37f9-4510-8800-4002a77c104d-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:54 crc kubenswrapper[4838]: I0202 11:16:54.510939 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9ztsj" Feb 02 11:16:54 crc kubenswrapper[4838]: I0202 11:16:54.577793 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9ztsj"] Feb 02 11:16:54 crc kubenswrapper[4838]: I0202 11:16:54.585130 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9ztsj"] Feb 02 11:16:55 crc kubenswrapper[4838]: I0202 11:16:55.521761 4838 generic.go:334] "Generic (PLEG): container finished" podID="c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" containerID="9b2a1490f6bca7b17103009d68c7f64c3848c5a87dc6c3b53fc348adea9d1c57" exitCode=0 Feb 02 11:16:55 crc kubenswrapper[4838]: I0202 11:16:55.521798 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab","Type":"ContainerDied","Data":"9b2a1490f6bca7b17103009d68c7f64c3848c5a87dc6c3b53fc348adea9d1c57"} Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.124930 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7c794d67d-m7bgs" podUID="2e02e508-a0b4-415f-a020-98d39c663483" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.172:9311/healthcheck\": read tcp 10.217.0.2:50866->10.217.0.172:9311: read: connection reset by peer" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.125277 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7c794d67d-m7bgs" podUID="2e02e508-a0b4-415f-a020-98d39c663483" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.172:9311/healthcheck\": read tcp 10.217.0.2:50872->10.217.0.172:9311: read: connection reset by peer" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.273765 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.351445 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-log-httpd\") pod \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.351701 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwvmn\" (UniqueName: \"kubernetes.io/projected/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-kube-api-access-xwvmn\") pod \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.351737 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-combined-ca-bundle\") pod \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.351785 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-scripts\") pod \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.351818 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-run-httpd\") pod \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.351884 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-config-data\") pod \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.351975 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-sg-core-conf-yaml\") pod \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\" (UID: \"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab\") " Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.352218 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" (UID: "c101b37f-f6a8-49ff-a03c-cbae4a2f10ab"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.352607 4838 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.353123 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" (UID: "c101b37f-f6a8-49ff-a03c-cbae4a2f10ab"). 
InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.358230 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-scripts" (OuterVolumeSpecName: "scripts") pod "c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" (UID: "c101b37f-f6a8-49ff-a03c-cbae4a2f10ab"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.358523 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-kube-api-access-xwvmn" (OuterVolumeSpecName: "kube-api-access-xwvmn") pod "c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" (UID: "c101b37f-f6a8-49ff-a03c-cbae4a2f10ab"). InnerVolumeSpecName "kube-api-access-xwvmn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.384898 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" (UID: "c101b37f-f6a8-49ff-a03c-cbae4a2f10ab"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.449124 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" (UID: "c101b37f-f6a8-49ff-a03c-cbae4a2f10ab"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.461982 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.462019 4838 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.462035 4838 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.462047 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwvmn\" (UniqueName: \"kubernetes.io/projected/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-kube-api-access-xwvmn\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.462061 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.491763 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-config-data" (OuterVolumeSpecName: "config-data") pod "c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" (UID: "c101b37f-f6a8-49ff-a03c-cbae4a2f10ab"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.518248 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="370d896a-37f9-4510-8800-4002a77c104d" path="/var/lib/kubelet/pods/370d896a-37f9-4510-8800-4002a77c104d/volumes" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.537299 4838 generic.go:334] "Generic (PLEG): container finished" podID="2e02e508-a0b4-415f-a020-98d39c663483" containerID="7d5bb5d9de96729f138eb26c32c40200d811eef9cd8af9f50521718a8019cf36" exitCode=0 Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.537377 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c794d67d-m7bgs" event={"ID":"2e02e508-a0b4-415f-a020-98d39c663483","Type":"ContainerDied","Data":"7d5bb5d9de96729f138eb26c32c40200d811eef9cd8af9f50521718a8019cf36"} Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.537411 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c794d67d-m7bgs" event={"ID":"2e02e508-a0b4-415f-a020-98d39c663483","Type":"ContainerDied","Data":"d42ff584a053002c4c80eec24ae954884fb2fdfe59baf4c27047a8a9572240e3"} Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.537424 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d42ff584a053002c4c80eec24ae954884fb2fdfe59baf4c27047a8a9572240e3" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.539828 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c101b37f-f6a8-49ff-a03c-cbae4a2f10ab","Type":"ContainerDied","Data":"38a5f4d3134efeefaa9746f0248a12774156346474f9bab372bbf53a7cf533a1"} Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.539872 4838 scope.go:117] "RemoveContainer" containerID="cf3c52eda25dcac184dc2bca2e37355ec8b213ddc8f0951561bdcb1eaa23ecca" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.540446 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.564118 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.583231 4838 scope.go:117] "RemoveContainer" containerID="7acddb017db27a800569fca268a18277ca4a06550fc6bb589be69bbb2cff9ae7" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.626358 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-7c794d67d-m7bgs" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.635949 4838 scope.go:117] "RemoveContainer" containerID="3aa40cca9cad6f6a017b9fb737503f4af87c7536e2225f190888d3cd5ccd401c" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.637880 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.650959 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.665343 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e02e508-a0b4-415f-a020-98d39c663483-config-data\") pod \"2e02e508-a0b4-415f-a020-98d39c663483\" (UID: \"2e02e508-a0b4-415f-a020-98d39c663483\") " Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.665447 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2e02e508-a0b4-415f-a020-98d39c663483-config-data-custom\") pod \"2e02e508-a0b4-415f-a020-98d39c663483\" (UID: \"2e02e508-a0b4-415f-a020-98d39c663483\") " Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.665592 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e02e508-a0b4-415f-a020-98d39c663483-combined-ca-bundle\") pod \"2e02e508-a0b4-415f-a020-98d39c663483\" (UID: \"2e02e508-a0b4-415f-a020-98d39c663483\") " Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.665712 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2e02e508-a0b4-415f-a020-98d39c663483-logs\") pod \"2e02e508-a0b4-415f-a020-98d39c663483\" (UID: \"2e02e508-a0b4-415f-a020-98d39c663483\") " Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.665755 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tm4qn\" (UniqueName: \"kubernetes.io/projected/2e02e508-a0b4-415f-a020-98d39c663483-kube-api-access-tm4qn\") pod \"2e02e508-a0b4-415f-a020-98d39c663483\" (UID: \"2e02e508-a0b4-415f-a020-98d39c663483\") " Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.665922 4838 scope.go:117] "RemoveContainer" containerID="9b2a1490f6bca7b17103009d68c7f64c3848c5a87dc6c3b53fc348adea9d1c57" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.667140 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e02e508-a0b4-415f-a020-98d39c663483-logs" (OuterVolumeSpecName: "logs") pod "2e02e508-a0b4-415f-a020-98d39c663483" (UID: "2e02e508-a0b4-415f-a020-98d39c663483"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.677595 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:16:56 crc kubenswrapper[4838]: E0202 11:16:56.678075 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" containerName="ceilometer-notification-agent" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.678152 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" containerName="ceilometer-notification-agent" Feb 02 11:16:56 crc kubenswrapper[4838]: E0202 11:16:56.678197 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" containerName="sg-core" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.678206 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" containerName="sg-core" Feb 02 11:16:56 crc kubenswrapper[4838]: E0202 11:16:56.678217 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e02e508-a0b4-415f-a020-98d39c663483" containerName="barbican-api" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.678225 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e02e508-a0b4-415f-a020-98d39c663483" containerName="barbican-api" Feb 02 11:16:56 crc kubenswrapper[4838]: E0202 11:16:56.678243 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0532d7b-d4a7-4021-9a42-8329a2ef50fc" containerName="dnsmasq-dns" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.678251 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0532d7b-d4a7-4021-9a42-8329a2ef50fc" containerName="dnsmasq-dns" Feb 02 11:16:56 crc kubenswrapper[4838]: E0202 11:16:56.678262 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="370d896a-37f9-4510-8800-4002a77c104d" containerName="registry-server" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.678269 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="370d896a-37f9-4510-8800-4002a77c104d" containerName="registry-server" Feb 02 11:16:56 crc kubenswrapper[4838]: E0202 11:16:56.678282 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" containerName="proxy-httpd" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.678289 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" containerName="proxy-httpd" Feb 02 11:16:56 crc kubenswrapper[4838]: E0202 11:16:56.678317 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="370d896a-37f9-4510-8800-4002a77c104d" containerName="extract-utilities" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.678325 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="370d896a-37f9-4510-8800-4002a77c104d" containerName="extract-utilities" Feb 02 11:16:56 crc kubenswrapper[4838]: E0202 11:16:56.678345 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="370d896a-37f9-4510-8800-4002a77c104d" containerName="extract-content" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.678353 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="370d896a-37f9-4510-8800-4002a77c104d" containerName="extract-content" Feb 02 11:16:56 crc kubenswrapper[4838]: E0202 11:16:56.678368 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0532d7b-d4a7-4021-9a42-8329a2ef50fc" 
containerName="init" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.678375 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0532d7b-d4a7-4021-9a42-8329a2ef50fc" containerName="init" Feb 02 11:16:56 crc kubenswrapper[4838]: E0202 11:16:56.678391 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" containerName="ceilometer-central-agent" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.678399 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" containerName="ceilometer-central-agent" Feb 02 11:16:56 crc kubenswrapper[4838]: E0202 11:16:56.678410 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e02e508-a0b4-415f-a020-98d39c663483" containerName="barbican-api-log" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.678418 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e02e508-a0b4-415f-a020-98d39c663483" containerName="barbican-api-log" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.678618 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e02e508-a0b4-415f-a020-98d39c663483" containerName="barbican-api-log" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.678650 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" containerName="proxy-httpd" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.678669 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" containerName="ceilometer-notification-agent" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.678681 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e02e508-a0b4-415f-a020-98d39c663483" containerName="barbican-api" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.678693 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="370d896a-37f9-4510-8800-4002a77c104d" containerName="registry-server" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.678707 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0532d7b-d4a7-4021-9a42-8329a2ef50fc" containerName="dnsmasq-dns" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.678718 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" containerName="ceilometer-central-agent" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.678733 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" containerName="sg-core" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.680845 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.686172 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.686369 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.691647 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.691818 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e02e508-a0b4-415f-a020-98d39c663483-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "2e02e508-a0b4-415f-a020-98d39c663483" (UID: "2e02e508-a0b4-415f-a020-98d39c663483"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.691983 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e02e508-a0b4-415f-a020-98d39c663483-kube-api-access-tm4qn" (OuterVolumeSpecName: "kube-api-access-tm4qn") pod "2e02e508-a0b4-415f-a020-98d39c663483" (UID: "2e02e508-a0b4-415f-a020-98d39c663483"). InnerVolumeSpecName "kube-api-access-tm4qn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.693079 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.710788 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e02e508-a0b4-415f-a020-98d39c663483-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2e02e508-a0b4-415f-a020-98d39c663483" (UID: "2e02e508-a0b4-415f-a020-98d39c663483"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.747799 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e02e508-a0b4-415f-a020-98d39c663483-config-data" (OuterVolumeSpecName: "config-data") pod "2e02e508-a0b4-415f-a020-98d39c663483" (UID: "2e02e508-a0b4-415f-a020-98d39c663483"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.770593 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " pod="openstack/ceilometer-0" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.770910 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " pod="openstack/ceilometer-0" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.770955 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b6aba336-e28e-41c4-9147-6862f36dfc13-log-httpd\") pod \"ceilometer-0\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " pod="openstack/ceilometer-0" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.770993 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vc6p\" (UniqueName: \"kubernetes.io/projected/b6aba336-e28e-41c4-9147-6862f36dfc13-kube-api-access-6vc6p\") pod \"ceilometer-0\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " pod="openstack/ceilometer-0" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.771021 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-scripts\") pod \"ceilometer-0\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " pod="openstack/ceilometer-0" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.771067 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " pod="openstack/ceilometer-0" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.771095 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b6aba336-e28e-41c4-9147-6862f36dfc13-run-httpd\") pod \"ceilometer-0\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " pod="openstack/ceilometer-0" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.771137 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-config-data\") pod \"ceilometer-0\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " pod="openstack/ceilometer-0" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.771183 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e02e508-a0b4-415f-a020-98d39c663483-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.771194 4838 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2e02e508-a0b4-415f-a020-98d39c663483-logs\") on node \"crc\" 
DevicePath \"\"" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.771202 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tm4qn\" (UniqueName: \"kubernetes.io/projected/2e02e508-a0b4-415f-a020-98d39c663483-kube-api-access-tm4qn\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.771213 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e02e508-a0b4-415f-a020-98d39c663483-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.771224 4838 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2e02e508-a0b4-415f-a020-98d39c663483-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.873079 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-scripts\") pod \"ceilometer-0\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " pod="openstack/ceilometer-0" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.873191 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " pod="openstack/ceilometer-0" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.873235 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b6aba336-e28e-41c4-9147-6862f36dfc13-run-httpd\") pod \"ceilometer-0\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " pod="openstack/ceilometer-0" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.873293 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-config-data\") pod \"ceilometer-0\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " pod="openstack/ceilometer-0" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.873326 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " pod="openstack/ceilometer-0" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.873349 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " pod="openstack/ceilometer-0" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.873396 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b6aba336-e28e-41c4-9147-6862f36dfc13-log-httpd\") pod \"ceilometer-0\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " pod="openstack/ceilometer-0" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.873449 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vc6p\" (UniqueName: 
\"kubernetes.io/projected/b6aba336-e28e-41c4-9147-6862f36dfc13-kube-api-access-6vc6p\") pod \"ceilometer-0\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " pod="openstack/ceilometer-0" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.874161 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b6aba336-e28e-41c4-9147-6862f36dfc13-run-httpd\") pod \"ceilometer-0\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " pod="openstack/ceilometer-0" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.874291 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b6aba336-e28e-41c4-9147-6862f36dfc13-log-httpd\") pod \"ceilometer-0\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " pod="openstack/ceilometer-0" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.877620 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " pod="openstack/ceilometer-0" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.877770 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-scripts\") pod \"ceilometer-0\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " pod="openstack/ceilometer-0" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.878324 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " pod="openstack/ceilometer-0" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.878348 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " pod="openstack/ceilometer-0" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.878716 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-config-data\") pod \"ceilometer-0\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " pod="openstack/ceilometer-0" Feb 02 11:16:56 crc kubenswrapper[4838]: I0202 11:16:56.897543 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vc6p\" (UniqueName: \"kubernetes.io/projected/b6aba336-e28e-41c4-9147-6862f36dfc13-kube-api-access-6vc6p\") pod \"ceilometer-0\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " pod="openstack/ceilometer-0" Feb 02 11:16:57 crc kubenswrapper[4838]: I0202 11:16:57.029879 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:16:57 crc kubenswrapper[4838]: I0202 11:16:57.525276 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:16:57 crc kubenswrapper[4838]: W0202 11:16:57.535928 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb6aba336_e28e_41c4_9147_6862f36dfc13.slice/crio-9e91a7f6139df8dfee0e289ad8e2365b778e22db382086d6ce3e118a3d1e58fc WatchSource:0}: Error finding container 9e91a7f6139df8dfee0e289ad8e2365b778e22db382086d6ce3e118a3d1e58fc: Status 404 returned error can't find the container with id 9e91a7f6139df8dfee0e289ad8e2365b778e22db382086d6ce3e118a3d1e58fc Feb 02 11:16:57 crc kubenswrapper[4838]: I0202 11:16:57.555641 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b6aba336-e28e-41c4-9147-6862f36dfc13","Type":"ContainerStarted","Data":"9e91a7f6139df8dfee0e289ad8e2365b778e22db382086d6ce3e118a3d1e58fc"} Feb 02 11:16:57 crc kubenswrapper[4838]: I0202 11:16:57.555689 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7c794d67d-m7bgs" Feb 02 11:16:57 crc kubenswrapper[4838]: I0202 11:16:57.611798 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7c794d67d-m7bgs"] Feb 02 11:16:57 crc kubenswrapper[4838]: I0202 11:16:57.619754 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-7c794d67d-m7bgs"] Feb 02 11:16:57 crc kubenswrapper[4838]: I0202 11:16:57.686951 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Feb 02 11:16:58 crc kubenswrapper[4838]: I0202 11:16:58.517209 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e02e508-a0b4-415f-a020-98d39c663483" path="/var/lib/kubelet/pods/2e02e508-a0b4-415f-a020-98d39c663483/volumes" Feb 02 11:16:58 crc kubenswrapper[4838]: I0202 11:16:58.518429 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c101b37f-f6a8-49ff-a03c-cbae4a2f10ab" path="/var/lib/kubelet/pods/c101b37f-f6a8-49ff-a03c-cbae4a2f10ab/volumes" Feb 02 11:16:58 crc kubenswrapper[4838]: I0202 11:16:58.715415 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:17:00 crc kubenswrapper[4838]: I0202 11:17:00.588499 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b6aba336-e28e-41c4-9147-6862f36dfc13","Type":"ContainerStarted","Data":"769556c7cbf635256cbc9387cbc48284cbc2c5ae283507a9ed29b47dabe1b782"} Feb 02 11:17:03 crc kubenswrapper[4838]: I0202 11:17:03.617479 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b6aba336-e28e-41c4-9147-6862f36dfc13","Type":"ContainerStarted","Data":"fd5490b7c54dc605dd1906a4d9e6178afd9a6b6d9c52eee4a836ae4e70bc8c06"} Feb 02 11:17:06 crc kubenswrapper[4838]: I0202 11:17:06.643394 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b6aba336-e28e-41c4-9147-6862f36dfc13","Type":"ContainerStarted","Data":"0200ea1a6d8922e766b77db1204e5301d3b7fa132ee9dd1133192269af60684d"} Feb 02 11:17:10 crc kubenswrapper[4838]: I0202 11:17:10.679372 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"b6aba336-e28e-41c4-9147-6862f36dfc13","Type":"ContainerStarted","Data":"4a2025f71220131c142054a076986d19f5692111a3dad72a6cebc78c5b31f0b0"} Feb 02 11:17:10 crc kubenswrapper[4838]: I0202 11:17:10.679520 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b6aba336-e28e-41c4-9147-6862f36dfc13" containerName="proxy-httpd" containerID="cri-o://4a2025f71220131c142054a076986d19f5692111a3dad72a6cebc78c5b31f0b0" gracePeriod=30 Feb 02 11:17:10 crc kubenswrapper[4838]: I0202 11:17:10.679528 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b6aba336-e28e-41c4-9147-6862f36dfc13" containerName="sg-core" containerID="cri-o://0200ea1a6d8922e766b77db1204e5301d3b7fa132ee9dd1133192269af60684d" gracePeriod=30 Feb 02 11:17:10 crc kubenswrapper[4838]: I0202 11:17:10.679564 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b6aba336-e28e-41c4-9147-6862f36dfc13" containerName="ceilometer-notification-agent" containerID="cri-o://fd5490b7c54dc605dd1906a4d9e6178afd9a6b6d9c52eee4a836ae4e70bc8c06" gracePeriod=30 Feb 02 11:17:10 crc kubenswrapper[4838]: I0202 11:17:10.679492 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b6aba336-e28e-41c4-9147-6862f36dfc13" containerName="ceilometer-central-agent" containerID="cri-o://769556c7cbf635256cbc9387cbc48284cbc2c5ae283507a9ed29b47dabe1b782" gracePeriod=30 Feb 02 11:17:10 crc kubenswrapper[4838]: I0202 11:17:10.681292 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 11:17:10 crc kubenswrapper[4838]: I0202 11:17:10.706850 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.317672708 podStartE2EDuration="14.706829449s" podCreationTimestamp="2026-02-02 11:16:56 +0000 UTC" firstStartedPulling="2026-02-02 11:16:57.539521463 +0000 UTC m=+1411.876622501" lastFinishedPulling="2026-02-02 11:17:09.928678214 +0000 UTC m=+1424.265779242" observedRunningTime="2026-02-02 11:17:10.702052053 +0000 UTC m=+1425.039153101" watchObservedRunningTime="2026-02-02 11:17:10.706829449 +0000 UTC m=+1425.043930507" Feb 02 11:17:11 crc kubenswrapper[4838]: I0202 11:17:11.694541 4838 generic.go:334] "Generic (PLEG): container finished" podID="b6aba336-e28e-41c4-9147-6862f36dfc13" containerID="4a2025f71220131c142054a076986d19f5692111a3dad72a6cebc78c5b31f0b0" exitCode=0 Feb 02 11:17:11 crc kubenswrapper[4838]: I0202 11:17:11.694893 4838 generic.go:334] "Generic (PLEG): container finished" podID="b6aba336-e28e-41c4-9147-6862f36dfc13" containerID="0200ea1a6d8922e766b77db1204e5301d3b7fa132ee9dd1133192269af60684d" exitCode=2 Feb 02 11:17:11 crc kubenswrapper[4838]: I0202 11:17:11.694902 4838 generic.go:334] "Generic (PLEG): container finished" podID="b6aba336-e28e-41c4-9147-6862f36dfc13" containerID="fd5490b7c54dc605dd1906a4d9e6178afd9a6b6d9c52eee4a836ae4e70bc8c06" exitCode=0 Feb 02 11:17:11 crc kubenswrapper[4838]: I0202 11:17:11.694908 4838 generic.go:334] "Generic (PLEG): container finished" podID="b6aba336-e28e-41c4-9147-6862f36dfc13" containerID="769556c7cbf635256cbc9387cbc48284cbc2c5ae283507a9ed29b47dabe1b782" exitCode=0 Feb 02 11:17:11 crc kubenswrapper[4838]: I0202 11:17:11.694708 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"b6aba336-e28e-41c4-9147-6862f36dfc13","Type":"ContainerDied","Data":"4a2025f71220131c142054a076986d19f5692111a3dad72a6cebc78c5b31f0b0"} Feb 02 11:17:11 crc kubenswrapper[4838]: I0202 11:17:11.694945 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b6aba336-e28e-41c4-9147-6862f36dfc13","Type":"ContainerDied","Data":"0200ea1a6d8922e766b77db1204e5301d3b7fa132ee9dd1133192269af60684d"} Feb 02 11:17:11 crc kubenswrapper[4838]: I0202 11:17:11.694960 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b6aba336-e28e-41c4-9147-6862f36dfc13","Type":"ContainerDied","Data":"fd5490b7c54dc605dd1906a4d9e6178afd9a6b6d9c52eee4a836ae4e70bc8c06"} Feb 02 11:17:11 crc kubenswrapper[4838]: I0202 11:17:11.694971 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b6aba336-e28e-41c4-9147-6862f36dfc13","Type":"ContainerDied","Data":"769556c7cbf635256cbc9387cbc48284cbc2c5ae283507a9ed29b47dabe1b782"} Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.056407 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.158315 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b6aba336-e28e-41c4-9147-6862f36dfc13-run-httpd\") pod \"b6aba336-e28e-41c4-9147-6862f36dfc13\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.158362 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-scripts\") pod \"b6aba336-e28e-41c4-9147-6862f36dfc13\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.158404 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-sg-core-conf-yaml\") pod \"b6aba336-e28e-41c4-9147-6862f36dfc13\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.158484 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6vc6p\" (UniqueName: \"kubernetes.io/projected/b6aba336-e28e-41c4-9147-6862f36dfc13-kube-api-access-6vc6p\") pod \"b6aba336-e28e-41c4-9147-6862f36dfc13\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.158516 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-config-data\") pod \"b6aba336-e28e-41c4-9147-6862f36dfc13\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.158574 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b6aba336-e28e-41c4-9147-6862f36dfc13-log-httpd\") pod \"b6aba336-e28e-41c4-9147-6862f36dfc13\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.158597 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-combined-ca-bundle\") pod \"b6aba336-e28e-41c4-9147-6862f36dfc13\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.158657 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-ceilometer-tls-certs\") pod \"b6aba336-e28e-41c4-9147-6862f36dfc13\" (UID: \"b6aba336-e28e-41c4-9147-6862f36dfc13\") " Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.158938 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6aba336-e28e-41c4-9147-6862f36dfc13-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b6aba336-e28e-41c4-9147-6862f36dfc13" (UID: "b6aba336-e28e-41c4-9147-6862f36dfc13"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.159151 4838 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b6aba336-e28e-41c4-9147-6862f36dfc13-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.160075 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6aba336-e28e-41c4-9147-6862f36dfc13-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b6aba336-e28e-41c4-9147-6862f36dfc13" (UID: "b6aba336-e28e-41c4-9147-6862f36dfc13"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.163558 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-scripts" (OuterVolumeSpecName: "scripts") pod "b6aba336-e28e-41c4-9147-6862f36dfc13" (UID: "b6aba336-e28e-41c4-9147-6862f36dfc13"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.166254 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6aba336-e28e-41c4-9147-6862f36dfc13-kube-api-access-6vc6p" (OuterVolumeSpecName: "kube-api-access-6vc6p") pod "b6aba336-e28e-41c4-9147-6862f36dfc13" (UID: "b6aba336-e28e-41c4-9147-6862f36dfc13"). InnerVolumeSpecName "kube-api-access-6vc6p". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.184224 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b6aba336-e28e-41c4-9147-6862f36dfc13" (UID: "b6aba336-e28e-41c4-9147-6862f36dfc13"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.220592 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "b6aba336-e28e-41c4-9147-6862f36dfc13" (UID: "b6aba336-e28e-41c4-9147-6862f36dfc13"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.235174 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b6aba336-e28e-41c4-9147-6862f36dfc13" (UID: "b6aba336-e28e-41c4-9147-6862f36dfc13"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.247735 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-config-data" (OuterVolumeSpecName: "config-data") pod "b6aba336-e28e-41c4-9147-6862f36dfc13" (UID: "b6aba336-e28e-41c4-9147-6862f36dfc13"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.259871 4838 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.259901 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6vc6p\" (UniqueName: \"kubernetes.io/projected/b6aba336-e28e-41c4-9147-6862f36dfc13-kube-api-access-6vc6p\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.259912 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.259920 4838 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b6aba336-e28e-41c4-9147-6862f36dfc13-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.259929 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.259939 4838 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.259947 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6aba336-e28e-41c4-9147-6862f36dfc13-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.707845 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b6aba336-e28e-41c4-9147-6862f36dfc13","Type":"ContainerDied","Data":"9e91a7f6139df8dfee0e289ad8e2365b778e22db382086d6ce3e118a3d1e58fc"} Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.707897 4838 scope.go:117] "RemoveContainer" containerID="4a2025f71220131c142054a076986d19f5692111a3dad72a6cebc78c5b31f0b0" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.707908 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.733035 4838 scope.go:117] "RemoveContainer" containerID="0200ea1a6d8922e766b77db1204e5301d3b7fa132ee9dd1133192269af60684d" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.736543 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.749510 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.757721 4838 scope.go:117] "RemoveContainer" containerID="fd5490b7c54dc605dd1906a4d9e6178afd9a6b6d9c52eee4a836ae4e70bc8c06" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.763355 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:17:12 crc kubenswrapper[4838]: E0202 11:17:12.763774 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6aba336-e28e-41c4-9147-6862f36dfc13" containerName="ceilometer-notification-agent" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.763793 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6aba336-e28e-41c4-9147-6862f36dfc13" containerName="ceilometer-notification-agent" Feb 02 11:17:12 crc kubenswrapper[4838]: E0202 11:17:12.763818 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6aba336-e28e-41c4-9147-6862f36dfc13" containerName="proxy-httpd" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.763826 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6aba336-e28e-41c4-9147-6862f36dfc13" containerName="proxy-httpd" Feb 02 11:17:12 crc kubenswrapper[4838]: E0202 11:17:12.763840 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6aba336-e28e-41c4-9147-6862f36dfc13" containerName="sg-core" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.763846 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6aba336-e28e-41c4-9147-6862f36dfc13" containerName="sg-core" Feb 02 11:17:12 crc kubenswrapper[4838]: E0202 11:17:12.763863 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6aba336-e28e-41c4-9147-6862f36dfc13" containerName="ceilometer-central-agent" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.763868 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6aba336-e28e-41c4-9147-6862f36dfc13" containerName="ceilometer-central-agent" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.764021 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6aba336-e28e-41c4-9147-6862f36dfc13" containerName="ceilometer-central-agent" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.764040 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6aba336-e28e-41c4-9147-6862f36dfc13" containerName="sg-core" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.764046 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6aba336-e28e-41c4-9147-6862f36dfc13" containerName="proxy-httpd" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.764081 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6aba336-e28e-41c4-9147-6862f36dfc13" containerName="ceilometer-notification-agent" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.765634 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.767443 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.768191 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.776539 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.783014 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.797119 4838 scope.go:117] "RemoveContainer" containerID="769556c7cbf635256cbc9387cbc48284cbc2c5ae283507a9ed29b47dabe1b782" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.867992 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4d3847cc-dec5-4288-9404-f55d46551d3b-log-httpd\") pod \"ceilometer-0\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " pod="openstack/ceilometer-0" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.868074 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qpbmp\" (UniqueName: \"kubernetes.io/projected/4d3847cc-dec5-4288-9404-f55d46551d3b-kube-api-access-qpbmp\") pod \"ceilometer-0\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " pod="openstack/ceilometer-0" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.868138 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " pod="openstack/ceilometer-0" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.868162 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4d3847cc-dec5-4288-9404-f55d46551d3b-run-httpd\") pod \"ceilometer-0\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " pod="openstack/ceilometer-0" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.868217 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " pod="openstack/ceilometer-0" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.868297 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-config-data\") pod \"ceilometer-0\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " pod="openstack/ceilometer-0" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.868330 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-scripts\") pod \"ceilometer-0\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " pod="openstack/ceilometer-0" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.868359 
4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " pod="openstack/ceilometer-0" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.969359 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4d3847cc-dec5-4288-9404-f55d46551d3b-log-httpd\") pod \"ceilometer-0\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " pod="openstack/ceilometer-0" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.969457 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qpbmp\" (UniqueName: \"kubernetes.io/projected/4d3847cc-dec5-4288-9404-f55d46551d3b-kube-api-access-qpbmp\") pod \"ceilometer-0\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " pod="openstack/ceilometer-0" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.969520 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " pod="openstack/ceilometer-0" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.969543 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4d3847cc-dec5-4288-9404-f55d46551d3b-run-httpd\") pod \"ceilometer-0\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " pod="openstack/ceilometer-0" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.969575 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " pod="openstack/ceilometer-0" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.969631 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-config-data\") pod \"ceilometer-0\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " pod="openstack/ceilometer-0" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.969661 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-scripts\") pod \"ceilometer-0\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " pod="openstack/ceilometer-0" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.969684 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " pod="openstack/ceilometer-0" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.970416 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4d3847cc-dec5-4288-9404-f55d46551d3b-log-httpd\") pod \"ceilometer-0\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " pod="openstack/ceilometer-0" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 
11:17:12.970659 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4d3847cc-dec5-4288-9404-f55d46551d3b-run-httpd\") pod \"ceilometer-0\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " pod="openstack/ceilometer-0" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.976999 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " pod="openstack/ceilometer-0" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.977494 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " pod="openstack/ceilometer-0" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.978269 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-config-data\") pod \"ceilometer-0\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " pod="openstack/ceilometer-0" Feb 02 11:17:12 crc kubenswrapper[4838]: I0202 11:17:12.978305 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " pod="openstack/ceilometer-0" Feb 02 11:17:13 crc kubenswrapper[4838]: I0202 11:17:13.010655 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-scripts\") pod \"ceilometer-0\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " pod="openstack/ceilometer-0" Feb 02 11:17:13 crc kubenswrapper[4838]: I0202 11:17:13.011045 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qpbmp\" (UniqueName: \"kubernetes.io/projected/4d3847cc-dec5-4288-9404-f55d46551d3b-kube-api-access-qpbmp\") pod \"ceilometer-0\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " pod="openstack/ceilometer-0" Feb 02 11:17:13 crc kubenswrapper[4838]: I0202 11:17:13.084802 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:17:13 crc kubenswrapper[4838]: I0202 11:17:13.573699 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:17:13 crc kubenswrapper[4838]: I0202 11:17:13.719994 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4d3847cc-dec5-4288-9404-f55d46551d3b","Type":"ContainerStarted","Data":"2b7cc56b124b16c775423101436ce99919089e503a50b7f4ce56629122aa8a20"} Feb 02 11:17:14 crc kubenswrapper[4838]: I0202 11:17:14.518323 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6aba336-e28e-41c4-9147-6862f36dfc13" path="/var/lib/kubelet/pods/b6aba336-e28e-41c4-9147-6862f36dfc13/volumes" Feb 02 11:17:15 crc kubenswrapper[4838]: I0202 11:17:15.429651 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 11:17:15 crc kubenswrapper[4838]: I0202 11:17:15.430034 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 11:17:15 crc kubenswrapper[4838]: I0202 11:17:15.430089 4838 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" Feb 02 11:17:15 crc kubenswrapper[4838]: I0202 11:17:15.431637 4838 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"59dac7f34e4b14b86296ead42a59d6f6e3f3b9fd93372b24781304406104890f"} pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 11:17:15 crc kubenswrapper[4838]: I0202 11:17:15.431697 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" containerID="cri-o://59dac7f34e4b14b86296ead42a59d6f6e3f3b9fd93372b24781304406104890f" gracePeriod=600 Feb 02 11:17:15 crc kubenswrapper[4838]: I0202 11:17:15.742938 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4d3847cc-dec5-4288-9404-f55d46551d3b","Type":"ContainerStarted","Data":"a34d621cce8ea7a9cc036344dcfc1640acb49e269bec9ef7210f9133c0c20d37"} Feb 02 11:17:16 crc kubenswrapper[4838]: I0202 11:17:16.753284 4838 generic.go:334] "Generic (PLEG): container finished" podID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerID="59dac7f34e4b14b86296ead42a59d6f6e3f3b9fd93372b24781304406104890f" exitCode=0 Feb 02 11:17:16 crc kubenswrapper[4838]: I0202 11:17:16.753330 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" event={"ID":"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce","Type":"ContainerDied","Data":"59dac7f34e4b14b86296ead42a59d6f6e3f3b9fd93372b24781304406104890f"} Feb 02 11:17:16 crc kubenswrapper[4838]: I0202 11:17:16.753583 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" event={"ID":"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce","Type":"ContainerStarted","Data":"0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9"} Feb 02 11:17:16 crc kubenswrapper[4838]: I0202 11:17:16.753604 4838 scope.go:117] "RemoveContainer" containerID="34c7a9cc4d8fb6168afba32d2440c7d9ab6f69f8c80d4ae7f515c16fdb162626" Feb 02 11:17:17 crc kubenswrapper[4838]: I0202 11:17:17.772029 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4d3847cc-dec5-4288-9404-f55d46551d3b","Type":"ContainerStarted","Data":"639f7870ea82c3ab973fd106afad9e967702bca07ebcc115f428b525da7f500c"} Feb 02 11:17:21 crc kubenswrapper[4838]: I0202 11:17:21.807685 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4d3847cc-dec5-4288-9404-f55d46551d3b","Type":"ContainerStarted","Data":"5cd43ccbc62626140f99f579266a435ba7800794b615ede07dcc2ad86188d33a"} Feb 02 11:17:23 crc kubenswrapper[4838]: I0202 11:17:23.829272 4838 generic.go:334] "Generic (PLEG): container finished" podID="12da676a-3c0b-4e05-996b-6f0b859d99e3" containerID="05667767dc9c615f688152f9f30c5e1ea9de23e5a3834c9f6d404517e4864ba3" exitCode=0 Feb 02 11:17:23 crc kubenswrapper[4838]: I0202 11:17:23.829462 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-5jll9" event={"ID":"12da676a-3c0b-4e05-996b-6f0b859d99e3","Type":"ContainerDied","Data":"05667767dc9c615f688152f9f30c5e1ea9de23e5a3834c9f6d404517e4864ba3"} Feb 02 11:17:24 crc kubenswrapper[4838]: I0202 11:17:24.842098 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4d3847cc-dec5-4288-9404-f55d46551d3b","Type":"ContainerStarted","Data":"35ffb794e2bbaf2a8b4908b75293dc8a32fea3a1b85f4d7f08cd57c293ae310a"} Feb 02 11:17:24 crc kubenswrapper[4838]: I0202 11:17:24.894548 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.229378281 podStartE2EDuration="12.894524394s" podCreationTimestamp="2026-02-02 11:17:12 +0000 UTC" firstStartedPulling="2026-02-02 11:17:13.580091584 +0000 UTC m=+1427.917192612" lastFinishedPulling="2026-02-02 11:17:24.245237707 +0000 UTC m=+1438.582338725" observedRunningTime="2026-02-02 11:17:24.882816674 +0000 UTC m=+1439.219917722" watchObservedRunningTime="2026-02-02 11:17:24.894524394 +0000 UTC m=+1439.231625432" Feb 02 11:17:25 crc kubenswrapper[4838]: I0202 11:17:25.258003 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-5jll9" Feb 02 11:17:25 crc kubenswrapper[4838]: I0202 11:17:25.424270 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/12da676a-3c0b-4e05-996b-6f0b859d99e3-db-sync-config-data\") pod \"12da676a-3c0b-4e05-996b-6f0b859d99e3\" (UID: \"12da676a-3c0b-4e05-996b-6f0b859d99e3\") " Feb 02 11:17:25 crc kubenswrapper[4838]: I0202 11:17:25.424962 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/12da676a-3c0b-4e05-996b-6f0b859d99e3-etc-machine-id\") pod \"12da676a-3c0b-4e05-996b-6f0b859d99e3\" (UID: \"12da676a-3c0b-4e05-996b-6f0b859d99e3\") " Feb 02 11:17:25 crc kubenswrapper[4838]: I0202 11:17:25.425033 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/12da676a-3c0b-4e05-996b-6f0b859d99e3-scripts\") pod \"12da676a-3c0b-4e05-996b-6f0b859d99e3\" (UID: \"12da676a-3c0b-4e05-996b-6f0b859d99e3\") " Feb 02 11:17:25 crc kubenswrapper[4838]: I0202 11:17:25.425363 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12da676a-3c0b-4e05-996b-6f0b859d99e3-combined-ca-bundle\") pod \"12da676a-3c0b-4e05-996b-6f0b859d99e3\" (UID: \"12da676a-3c0b-4e05-996b-6f0b859d99e3\") " Feb 02 11:17:25 crc kubenswrapper[4838]: I0202 11:17:25.425389 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/12da676a-3c0b-4e05-996b-6f0b859d99e3-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "12da676a-3c0b-4e05-996b-6f0b859d99e3" (UID: "12da676a-3c0b-4e05-996b-6f0b859d99e3"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 11:17:25 crc kubenswrapper[4838]: I0202 11:17:25.425584 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-29dl6\" (UniqueName: \"kubernetes.io/projected/12da676a-3c0b-4e05-996b-6f0b859d99e3-kube-api-access-29dl6\") pod \"12da676a-3c0b-4e05-996b-6f0b859d99e3\" (UID: \"12da676a-3c0b-4e05-996b-6f0b859d99e3\") " Feb 02 11:17:25 crc kubenswrapper[4838]: I0202 11:17:25.425689 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12da676a-3c0b-4e05-996b-6f0b859d99e3-config-data\") pod \"12da676a-3c0b-4e05-996b-6f0b859d99e3\" (UID: \"12da676a-3c0b-4e05-996b-6f0b859d99e3\") " Feb 02 11:17:25 crc kubenswrapper[4838]: I0202 11:17:25.426489 4838 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/12da676a-3c0b-4e05-996b-6f0b859d99e3-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:25 crc kubenswrapper[4838]: I0202 11:17:25.431334 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12da676a-3c0b-4e05-996b-6f0b859d99e3-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "12da676a-3c0b-4e05-996b-6f0b859d99e3" (UID: "12da676a-3c0b-4e05-996b-6f0b859d99e3"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:17:25 crc kubenswrapper[4838]: I0202 11:17:25.444098 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12da676a-3c0b-4e05-996b-6f0b859d99e3-kube-api-access-29dl6" (OuterVolumeSpecName: "kube-api-access-29dl6") pod "12da676a-3c0b-4e05-996b-6f0b859d99e3" (UID: "12da676a-3c0b-4e05-996b-6f0b859d99e3"). InnerVolumeSpecName "kube-api-access-29dl6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:17:25 crc kubenswrapper[4838]: I0202 11:17:25.449718 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12da676a-3c0b-4e05-996b-6f0b859d99e3-scripts" (OuterVolumeSpecName: "scripts") pod "12da676a-3c0b-4e05-996b-6f0b859d99e3" (UID: "12da676a-3c0b-4e05-996b-6f0b859d99e3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:17:25 crc kubenswrapper[4838]: I0202 11:17:25.460190 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12da676a-3c0b-4e05-996b-6f0b859d99e3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "12da676a-3c0b-4e05-996b-6f0b859d99e3" (UID: "12da676a-3c0b-4e05-996b-6f0b859d99e3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:17:25 crc kubenswrapper[4838]: I0202 11:17:25.481115 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12da676a-3c0b-4e05-996b-6f0b859d99e3-config-data" (OuterVolumeSpecName: "config-data") pod "12da676a-3c0b-4e05-996b-6f0b859d99e3" (UID: "12da676a-3c0b-4e05-996b-6f0b859d99e3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:17:25 crc kubenswrapper[4838]: I0202 11:17:25.527764 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-29dl6\" (UniqueName: \"kubernetes.io/projected/12da676a-3c0b-4e05-996b-6f0b859d99e3-kube-api-access-29dl6\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:25 crc kubenswrapper[4838]: I0202 11:17:25.527961 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12da676a-3c0b-4e05-996b-6f0b859d99e3-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:25 crc kubenswrapper[4838]: I0202 11:17:25.528054 4838 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/12da676a-3c0b-4e05-996b-6f0b859d99e3-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:25 crc kubenswrapper[4838]: I0202 11:17:25.528116 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/12da676a-3c0b-4e05-996b-6f0b859d99e3-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:25 crc kubenswrapper[4838]: I0202 11:17:25.528170 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12da676a-3c0b-4e05-996b-6f0b859d99e3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:25 crc kubenswrapper[4838]: I0202 11:17:25.854657 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-5jll9" event={"ID":"12da676a-3c0b-4e05-996b-6f0b859d99e3","Type":"ContainerDied","Data":"a43cba69155e67a632d44189c74138934d004e5eb27583ed8688ac413fe6d55d"} Feb 02 11:17:25 crc kubenswrapper[4838]: I0202 11:17:25.854726 4838 pod_container_deletor.go:80] 
"Container not found in pod's containers" containerID="a43cba69155e67a632d44189c74138934d004e5eb27583ed8688ac413fe6d55d" Feb 02 11:17:25 crc kubenswrapper[4838]: I0202 11:17:25.854683 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-5jll9" Feb 02 11:17:25 crc kubenswrapper[4838]: I0202 11:17:25.854894 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.155179 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 11:17:26 crc kubenswrapper[4838]: E0202 11:17:26.157195 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12da676a-3c0b-4e05-996b-6f0b859d99e3" containerName="cinder-db-sync" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.157223 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="12da676a-3c0b-4e05-996b-6f0b859d99e3" containerName="cinder-db-sync" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.157461 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="12da676a-3c0b-4e05-996b-6f0b859d99e3" containerName="cinder-db-sync" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.158660 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.162228 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.162700 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.162277 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.162926 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-bsbkf" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.176180 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.240415 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-795f4db4bc-ls245"] Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.242390 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-795f4db4bc-ls245" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.261570 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-config\") pod \"dnsmasq-dns-795f4db4bc-ls245\" (UID: \"d60f2880-0bad-4592-a199-c24539da55ab\") " pod="openstack/dnsmasq-dns-795f4db4bc-ls245" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.261807 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-ovsdbserver-nb\") pod \"dnsmasq-dns-795f4db4bc-ls245\" (UID: \"d60f2880-0bad-4592-a199-c24539da55ab\") " pod="openstack/dnsmasq-dns-795f4db4bc-ls245" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.261873 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-scripts\") pod \"cinder-scheduler-0\" (UID: \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.261943 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.261980 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-config-data\") pod \"cinder-scheduler-0\" (UID: \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.262097 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-dns-svc\") pod \"dnsmasq-dns-795f4db4bc-ls245\" (UID: \"d60f2880-0bad-4592-a199-c24539da55ab\") " pod="openstack/dnsmasq-dns-795f4db4bc-ls245" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.262151 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-ovsdbserver-sb\") pod \"dnsmasq-dns-795f4db4bc-ls245\" (UID: \"d60f2880-0bad-4592-a199-c24539da55ab\") " pod="openstack/dnsmasq-dns-795f4db4bc-ls245" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.262192 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfg25\" (UniqueName: \"kubernetes.io/projected/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-kube-api-access-bfg25\") pod \"cinder-scheduler-0\" (UID: \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.262224 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: 
\"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.262256 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgj6g\" (UniqueName: \"kubernetes.io/projected/d60f2880-0bad-4592-a199-c24539da55ab-kube-api-access-tgj6g\") pod \"dnsmasq-dns-795f4db4bc-ls245\" (UID: \"d60f2880-0bad-4592-a199-c24539da55ab\") " pod="openstack/dnsmasq-dns-795f4db4bc-ls245" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.262320 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-dns-swift-storage-0\") pod \"dnsmasq-dns-795f4db4bc-ls245\" (UID: \"d60f2880-0bad-4592-a199-c24539da55ab\") " pod="openstack/dnsmasq-dns-795f4db4bc-ls245" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.262368 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.273253 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-795f4db4bc-ls245"] Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.364283 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-dns-svc\") pod \"dnsmasq-dns-795f4db4bc-ls245\" (UID: \"d60f2880-0bad-4592-a199-c24539da55ab\") " pod="openstack/dnsmasq-dns-795f4db4bc-ls245" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.364343 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-ovsdbserver-sb\") pod \"dnsmasq-dns-795f4db4bc-ls245\" (UID: \"d60f2880-0bad-4592-a199-c24539da55ab\") " pod="openstack/dnsmasq-dns-795f4db4bc-ls245" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.364380 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfg25\" (UniqueName: \"kubernetes.io/projected/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-kube-api-access-bfg25\") pod \"cinder-scheduler-0\" (UID: \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.364407 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.364428 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgj6g\" (UniqueName: \"kubernetes.io/projected/d60f2880-0bad-4592-a199-c24539da55ab-kube-api-access-tgj6g\") pod \"dnsmasq-dns-795f4db4bc-ls245\" (UID: \"d60f2880-0bad-4592-a199-c24539da55ab\") " pod="openstack/dnsmasq-dns-795f4db4bc-ls245" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.364481 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-dns-swift-storage-0\") pod \"dnsmasq-dns-795f4db4bc-ls245\" (UID: \"d60f2880-0bad-4592-a199-c24539da55ab\") " pod="openstack/dnsmasq-dns-795f4db4bc-ls245" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.364511 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.364556 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-config\") pod \"dnsmasq-dns-795f4db4bc-ls245\" (UID: \"d60f2880-0bad-4592-a199-c24539da55ab\") " pod="openstack/dnsmasq-dns-795f4db4bc-ls245" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.364611 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-ovsdbserver-nb\") pod \"dnsmasq-dns-795f4db4bc-ls245\" (UID: \"d60f2880-0bad-4592-a199-c24539da55ab\") " pod="openstack/dnsmasq-dns-795f4db4bc-ls245" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.364686 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-scripts\") pod \"cinder-scheduler-0\" (UID: \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.364730 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.364759 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-config-data\") pod \"cinder-scheduler-0\" (UID: \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.366444 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.366778 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-ovsdbserver-nb\") pod \"dnsmasq-dns-795f4db4bc-ls245\" (UID: \"d60f2880-0bad-4592-a199-c24539da55ab\") " pod="openstack/dnsmasq-dns-795f4db4bc-ls245" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.366730 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-dns-swift-storage-0\") pod \"dnsmasq-dns-795f4db4bc-ls245\" (UID: 
\"d60f2880-0bad-4592-a199-c24539da55ab\") " pod="openstack/dnsmasq-dns-795f4db4bc-ls245" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.367137 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-config\") pod \"dnsmasq-dns-795f4db4bc-ls245\" (UID: \"d60f2880-0bad-4592-a199-c24539da55ab\") " pod="openstack/dnsmasq-dns-795f4db4bc-ls245" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.367504 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-dns-svc\") pod \"dnsmasq-dns-795f4db4bc-ls245\" (UID: \"d60f2880-0bad-4592-a199-c24539da55ab\") " pod="openstack/dnsmasq-dns-795f4db4bc-ls245" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.367927 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-ovsdbserver-sb\") pod \"dnsmasq-dns-795f4db4bc-ls245\" (UID: \"d60f2880-0bad-4592-a199-c24539da55ab\") " pod="openstack/dnsmasq-dns-795f4db4bc-ls245" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.374610 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.376639 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-config-data\") pod \"cinder-scheduler-0\" (UID: \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.377338 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-scripts\") pod \"cinder-scheduler-0\" (UID: \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.387362 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.392566 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfg25\" (UniqueName: \"kubernetes.io/projected/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-kube-api-access-bfg25\") pod \"cinder-scheduler-0\" (UID: \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.399135 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgj6g\" (UniqueName: \"kubernetes.io/projected/d60f2880-0bad-4592-a199-c24539da55ab-kube-api-access-tgj6g\") pod \"dnsmasq-dns-795f4db4bc-ls245\" (UID: \"d60f2880-0bad-4592-a199-c24539da55ab\") " pod="openstack/dnsmasq-dns-795f4db4bc-ls245" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.504516 4838 reflector.go:368] Caches populated for *v1.Secret from 
object-"openstack"/"cinder-cinder-dockercfg-bsbkf" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.514140 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.538142 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.540733 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.548774 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.550333 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.565794 4838 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","podc101b37f-f6a8-49ff-a03c-cbae4a2f10ab"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort podc101b37f-f6a8-49ff-a03c-cbae4a2f10ab] : Timed out while waiting for systemd to remove kubepods-besteffort-podc101b37f_f6a8_49ff_a03c_cbae4a2f10ab.slice" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.594684 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-795f4db4bc-ls245" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.681457 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9a412e8-72a9-4e5b-a609-04551277d08c-config-data\") pod \"cinder-api-0\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " pod="openstack/cinder-api-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.681573 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9a412e8-72a9-4e5b-a609-04551277d08c-scripts\") pod \"cinder-api-0\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " pod="openstack/cinder-api-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.682658 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f9a412e8-72a9-4e5b-a609-04551277d08c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " pod="openstack/cinder-api-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.682797 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9a412e8-72a9-4e5b-a609-04551277d08c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " pod="openstack/cinder-api-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.682849 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wfds\" (UniqueName: \"kubernetes.io/projected/f9a412e8-72a9-4e5b-a609-04551277d08c-kube-api-access-4wfds\") pod \"cinder-api-0\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " pod="openstack/cinder-api-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.682947 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/f9a412e8-72a9-4e5b-a609-04551277d08c-config-data-custom\") pod \"cinder-api-0\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " pod="openstack/cinder-api-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.683053 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9a412e8-72a9-4e5b-a609-04551277d08c-logs\") pod \"cinder-api-0\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " pod="openstack/cinder-api-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.786600 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9a412e8-72a9-4e5b-a609-04551277d08c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " pod="openstack/cinder-api-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.786686 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wfds\" (UniqueName: \"kubernetes.io/projected/f9a412e8-72a9-4e5b-a609-04551277d08c-kube-api-access-4wfds\") pod \"cinder-api-0\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " pod="openstack/cinder-api-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.786744 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f9a412e8-72a9-4e5b-a609-04551277d08c-config-data-custom\") pod \"cinder-api-0\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " pod="openstack/cinder-api-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.786803 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9a412e8-72a9-4e5b-a609-04551277d08c-logs\") pod \"cinder-api-0\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " pod="openstack/cinder-api-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.786872 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9a412e8-72a9-4e5b-a609-04551277d08c-config-data\") pod \"cinder-api-0\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " pod="openstack/cinder-api-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.786898 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9a412e8-72a9-4e5b-a609-04551277d08c-scripts\") pod \"cinder-api-0\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " pod="openstack/cinder-api-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.786929 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f9a412e8-72a9-4e5b-a609-04551277d08c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " pod="openstack/cinder-api-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.787029 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f9a412e8-72a9-4e5b-a609-04551277d08c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " pod="openstack/cinder-api-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.789328 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/f9a412e8-72a9-4e5b-a609-04551277d08c-logs\") pod \"cinder-api-0\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " pod="openstack/cinder-api-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.800462 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f9a412e8-72a9-4e5b-a609-04551277d08c-config-data-custom\") pod \"cinder-api-0\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " pod="openstack/cinder-api-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.801153 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9a412e8-72a9-4e5b-a609-04551277d08c-config-data\") pod \"cinder-api-0\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " pod="openstack/cinder-api-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.801890 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9a412e8-72a9-4e5b-a609-04551277d08c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " pod="openstack/cinder-api-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.831951 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wfds\" (UniqueName: \"kubernetes.io/projected/f9a412e8-72a9-4e5b-a609-04551277d08c-kube-api-access-4wfds\") pod \"cinder-api-0\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " pod="openstack/cinder-api-0" Feb 02 11:17:26 crc kubenswrapper[4838]: I0202 11:17:26.833051 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9a412e8-72a9-4e5b-a609-04551277d08c-scripts\") pod \"cinder-api-0\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " pod="openstack/cinder-api-0" Feb 02 11:17:27 crc kubenswrapper[4838]: I0202 11:17:27.008524 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 11:17:27 crc kubenswrapper[4838]: I0202 11:17:27.033218 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Feb 02 11:17:27 crc kubenswrapper[4838]: I0202 11:17:27.318246 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-795f4db4bc-ls245"] Feb 02 11:17:27 crc kubenswrapper[4838]: I0202 11:17:27.592980 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 02 11:17:27 crc kubenswrapper[4838]: W0202 11:17:27.597550 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf9a412e8_72a9_4e5b_a609_04551277d08c.slice/crio-d0d12ebdf58f08f6fa7efa285bcba07b0ef99e9665f52a7e728cd3fe841dc758 WatchSource:0}: Error finding container d0d12ebdf58f08f6fa7efa285bcba07b0ef99e9665f52a7e728cd3fe841dc758: Status 404 returned error can't find the container with id d0d12ebdf58f08f6fa7efa285bcba07b0ef99e9665f52a7e728cd3fe841dc758 Feb 02 11:17:27 crc kubenswrapper[4838]: I0202 11:17:27.890777 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f9a412e8-72a9-4e5b-a609-04551277d08c","Type":"ContainerStarted","Data":"d0d12ebdf58f08f6fa7efa285bcba07b0ef99e9665f52a7e728cd3fe841dc758"} Feb 02 11:17:27 crc kubenswrapper[4838]: I0202 11:17:27.893145 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a","Type":"ContainerStarted","Data":"be40cd6c86cfe6d5f224c049d7a914b866f0c228f7188d6100f6a832fea6670e"} Feb 02 11:17:27 crc kubenswrapper[4838]: I0202 11:17:27.895135 4838 generic.go:334] "Generic (PLEG): container finished" podID="d60f2880-0bad-4592-a199-c24539da55ab" containerID="96a3b1a7488871cbb08e673c4e79ceb4d709eb0991d160fa636514d7613c68ca" exitCode=0 Feb 02 11:17:27 crc kubenswrapper[4838]: I0202 11:17:27.895177 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-795f4db4bc-ls245" event={"ID":"d60f2880-0bad-4592-a199-c24539da55ab","Type":"ContainerDied","Data":"96a3b1a7488871cbb08e673c4e79ceb4d709eb0991d160fa636514d7613c68ca"} Feb 02 11:17:27 crc kubenswrapper[4838]: I0202 11:17:27.895251 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-795f4db4bc-ls245" event={"ID":"d60f2880-0bad-4592-a199-c24539da55ab","Type":"ContainerStarted","Data":"fb24c97a4f933f0f7e84b2947ab604361d3918c223ba7a4fdab52ed9dd2189ca"} Feb 02 11:17:28 crc kubenswrapper[4838]: I0202 11:17:28.441147 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Feb 02 11:17:28 crc kubenswrapper[4838]: I0202 11:17:28.907766 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-795f4db4bc-ls245" event={"ID":"d60f2880-0bad-4592-a199-c24539da55ab","Type":"ContainerStarted","Data":"7c3623f669ca887ea25d2b5f033920c745920685ac72a10ae4f491a1d915a005"} Feb 02 11:17:28 crc kubenswrapper[4838]: I0202 11:17:28.908170 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-795f4db4bc-ls245" Feb 02 11:17:28 crc kubenswrapper[4838]: I0202 11:17:28.912460 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f9a412e8-72a9-4e5b-a609-04551277d08c","Type":"ContainerStarted","Data":"fa84d0a785a71b27a60c233495af9bd072a09d154ac83d48fcbd3f10b918817d"} Feb 02 11:17:28 crc kubenswrapper[4838]: I0202 11:17:28.931014 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-795f4db4bc-ls245" podStartSLOduration=2.930998153 
podStartE2EDuration="2.930998153s" podCreationTimestamp="2026-02-02 11:17:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:17:28.927998804 +0000 UTC m=+1443.265099832" watchObservedRunningTime="2026-02-02 11:17:28.930998153 +0000 UTC m=+1443.268099181" Feb 02 11:17:29 crc kubenswrapper[4838]: I0202 11:17:29.923753 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f9a412e8-72a9-4e5b-a609-04551277d08c","Type":"ContainerStarted","Data":"4eb9a3bce94dca00cb4ad843b784736386fdf14e5dd11620535ac2c2fd9b7508"} Feb 02 11:17:29 crc kubenswrapper[4838]: I0202 11:17:29.923864 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="f9a412e8-72a9-4e5b-a609-04551277d08c" containerName="cinder-api-log" containerID="cri-o://fa84d0a785a71b27a60c233495af9bd072a09d154ac83d48fcbd3f10b918817d" gracePeriod=30 Feb 02 11:17:29 crc kubenswrapper[4838]: I0202 11:17:29.924152 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Feb 02 11:17:29 crc kubenswrapper[4838]: I0202 11:17:29.923908 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="f9a412e8-72a9-4e5b-a609-04551277d08c" containerName="cinder-api" containerID="cri-o://4eb9a3bce94dca00cb4ad843b784736386fdf14e5dd11620535ac2c2fd9b7508" gracePeriod=30 Feb 02 11:17:29 crc kubenswrapper[4838]: I0202 11:17:29.948185 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.948164786 podStartE2EDuration="3.948164786s" podCreationTimestamp="2026-02-02 11:17:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:17:29.946806551 +0000 UTC m=+1444.283907579" watchObservedRunningTime="2026-02-02 11:17:29.948164786 +0000 UTC m=+1444.285265834" Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.616256 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.769929 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9a412e8-72a9-4e5b-a609-04551277d08c-logs\") pod \"f9a412e8-72a9-4e5b-a609-04551277d08c\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.769989 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9a412e8-72a9-4e5b-a609-04551277d08c-scripts\") pod \"f9a412e8-72a9-4e5b-a609-04551277d08c\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.770034 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f9a412e8-72a9-4e5b-a609-04551277d08c-config-data-custom\") pod \"f9a412e8-72a9-4e5b-a609-04551277d08c\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.770486 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9a412e8-72a9-4e5b-a609-04551277d08c-config-data\") pod \"f9a412e8-72a9-4e5b-a609-04551277d08c\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.770533 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4wfds\" (UniqueName: \"kubernetes.io/projected/f9a412e8-72a9-4e5b-a609-04551277d08c-kube-api-access-4wfds\") pod \"f9a412e8-72a9-4e5b-a609-04551277d08c\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.770669 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f9a412e8-72a9-4e5b-a609-04551277d08c-etc-machine-id\") pod \"f9a412e8-72a9-4e5b-a609-04551277d08c\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.770739 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9a412e8-72a9-4e5b-a609-04551277d08c-combined-ca-bundle\") pod \"f9a412e8-72a9-4e5b-a609-04551277d08c\" (UID: \"f9a412e8-72a9-4e5b-a609-04551277d08c\") " Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.776266 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9a412e8-72a9-4e5b-a609-04551277d08c-logs" (OuterVolumeSpecName: "logs") pod "f9a412e8-72a9-4e5b-a609-04551277d08c" (UID: "f9a412e8-72a9-4e5b-a609-04551277d08c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.779834 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9a412e8-72a9-4e5b-a609-04551277d08c-kube-api-access-4wfds" (OuterVolumeSpecName: "kube-api-access-4wfds") pod "f9a412e8-72a9-4e5b-a609-04551277d08c" (UID: "f9a412e8-72a9-4e5b-a609-04551277d08c"). InnerVolumeSpecName "kube-api-access-4wfds". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.780652 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f9a412e8-72a9-4e5b-a609-04551277d08c-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "f9a412e8-72a9-4e5b-a609-04551277d08c" (UID: "f9a412e8-72a9-4e5b-a609-04551277d08c"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.780918 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9a412e8-72a9-4e5b-a609-04551277d08c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f9a412e8-72a9-4e5b-a609-04551277d08c" (UID: "f9a412e8-72a9-4e5b-a609-04551277d08c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.785483 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9a412e8-72a9-4e5b-a609-04551277d08c-scripts" (OuterVolumeSpecName: "scripts") pod "f9a412e8-72a9-4e5b-a609-04551277d08c" (UID: "f9a412e8-72a9-4e5b-a609-04551277d08c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.800250 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9a412e8-72a9-4e5b-a609-04551277d08c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f9a412e8-72a9-4e5b-a609-04551277d08c" (UID: "f9a412e8-72a9-4e5b-a609-04551277d08c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.836916 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9a412e8-72a9-4e5b-a609-04551277d08c-config-data" (OuterVolumeSpecName: "config-data") pod "f9a412e8-72a9-4e5b-a609-04551277d08c" (UID: "f9a412e8-72a9-4e5b-a609-04551277d08c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.873261 4838 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f9a412e8-72a9-4e5b-a609-04551277d08c-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.873294 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9a412e8-72a9-4e5b-a609-04551277d08c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.873303 4838 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9a412e8-72a9-4e5b-a609-04551277d08c-logs\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.873311 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9a412e8-72a9-4e5b-a609-04551277d08c-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.873319 4838 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f9a412e8-72a9-4e5b-a609-04551277d08c-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.873329 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9a412e8-72a9-4e5b-a609-04551277d08c-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.873337 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4wfds\" (UniqueName: \"kubernetes.io/projected/f9a412e8-72a9-4e5b-a609-04551277d08c-kube-api-access-4wfds\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.955661 4838 generic.go:334] "Generic (PLEG): container finished" podID="f9a412e8-72a9-4e5b-a609-04551277d08c" containerID="4eb9a3bce94dca00cb4ad843b784736386fdf14e5dd11620535ac2c2fd9b7508" exitCode=0 Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.955703 4838 generic.go:334] "Generic (PLEG): container finished" podID="f9a412e8-72a9-4e5b-a609-04551277d08c" containerID="fa84d0a785a71b27a60c233495af9bd072a09d154ac83d48fcbd3f10b918817d" exitCode=143 Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.955726 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f9a412e8-72a9-4e5b-a609-04551277d08c","Type":"ContainerDied","Data":"4eb9a3bce94dca00cb4ad843b784736386fdf14e5dd11620535ac2c2fd9b7508"} Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.955753 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f9a412e8-72a9-4e5b-a609-04551277d08c","Type":"ContainerDied","Data":"fa84d0a785a71b27a60c233495af9bd072a09d154ac83d48fcbd3f10b918817d"} Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.955763 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f9a412e8-72a9-4e5b-a609-04551277d08c","Type":"ContainerDied","Data":"d0d12ebdf58f08f6fa7efa285bcba07b0ef99e9665f52a7e728cd3fe841dc758"} Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.955777 4838 scope.go:117] "RemoveContainer" containerID="4eb9a3bce94dca00cb4ad843b784736386fdf14e5dd11620535ac2c2fd9b7508" Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.955904 
4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.991284 4838 scope.go:117] "RemoveContainer" containerID="fa84d0a785a71b27a60c233495af9bd072a09d154ac83d48fcbd3f10b918817d" Feb 02 11:17:30 crc kubenswrapper[4838]: I0202 11:17:30.996533 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.017380 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.030562 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Feb 02 11:17:31 crc kubenswrapper[4838]: E0202 11:17:31.030995 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a412e8-72a9-4e5b-a609-04551277d08c" containerName="cinder-api-log" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.031014 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a412e8-72a9-4e5b-a609-04551277d08c" containerName="cinder-api-log" Feb 02 11:17:31 crc kubenswrapper[4838]: E0202 11:17:31.031044 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a412e8-72a9-4e5b-a609-04551277d08c" containerName="cinder-api" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.031050 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a412e8-72a9-4e5b-a609-04551277d08c" containerName="cinder-api" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.031235 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9a412e8-72a9-4e5b-a609-04551277d08c" containerName="cinder-api-log" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.031248 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9a412e8-72a9-4e5b-a609-04551277d08c" containerName="cinder-api" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.032805 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.041100 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.043809 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.044067 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.044218 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.117149 4838 scope.go:117] "RemoveContainer" containerID="4eb9a3bce94dca00cb4ad843b784736386fdf14e5dd11620535ac2c2fd9b7508" Feb 02 11:17:31 crc kubenswrapper[4838]: E0202 11:17:31.117964 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4eb9a3bce94dca00cb4ad843b784736386fdf14e5dd11620535ac2c2fd9b7508\": container with ID starting with 4eb9a3bce94dca00cb4ad843b784736386fdf14e5dd11620535ac2c2fd9b7508 not found: ID does not exist" containerID="4eb9a3bce94dca00cb4ad843b784736386fdf14e5dd11620535ac2c2fd9b7508" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.118029 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4eb9a3bce94dca00cb4ad843b784736386fdf14e5dd11620535ac2c2fd9b7508"} err="failed to get container status \"4eb9a3bce94dca00cb4ad843b784736386fdf14e5dd11620535ac2c2fd9b7508\": rpc error: code = NotFound desc = could not find container \"4eb9a3bce94dca00cb4ad843b784736386fdf14e5dd11620535ac2c2fd9b7508\": container with ID starting with 4eb9a3bce94dca00cb4ad843b784736386fdf14e5dd11620535ac2c2fd9b7508 not found: ID does not exist" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.118090 4838 scope.go:117] "RemoveContainer" containerID="fa84d0a785a71b27a60c233495af9bd072a09d154ac83d48fcbd3f10b918817d" Feb 02 11:17:31 crc kubenswrapper[4838]: E0202 11:17:31.118695 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa84d0a785a71b27a60c233495af9bd072a09d154ac83d48fcbd3f10b918817d\": container with ID starting with fa84d0a785a71b27a60c233495af9bd072a09d154ac83d48fcbd3f10b918817d not found: ID does not exist" containerID="fa84d0a785a71b27a60c233495af9bd072a09d154ac83d48fcbd3f10b918817d" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.119285 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa84d0a785a71b27a60c233495af9bd072a09d154ac83d48fcbd3f10b918817d"} err="failed to get container status \"fa84d0a785a71b27a60c233495af9bd072a09d154ac83d48fcbd3f10b918817d\": rpc error: code = NotFound desc = could not find container \"fa84d0a785a71b27a60c233495af9bd072a09d154ac83d48fcbd3f10b918817d\": container with ID starting with fa84d0a785a71b27a60c233495af9bd072a09d154ac83d48fcbd3f10b918817d not found: ID does not exist" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.119324 4838 scope.go:117] "RemoveContainer" containerID="4eb9a3bce94dca00cb4ad843b784736386fdf14e5dd11620535ac2c2fd9b7508" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.121291 4838 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"4eb9a3bce94dca00cb4ad843b784736386fdf14e5dd11620535ac2c2fd9b7508"} err="failed to get container status \"4eb9a3bce94dca00cb4ad843b784736386fdf14e5dd11620535ac2c2fd9b7508\": rpc error: code = NotFound desc = could not find container \"4eb9a3bce94dca00cb4ad843b784736386fdf14e5dd11620535ac2c2fd9b7508\": container with ID starting with 4eb9a3bce94dca00cb4ad843b784736386fdf14e5dd11620535ac2c2fd9b7508 not found: ID does not exist" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.121346 4838 scope.go:117] "RemoveContainer" containerID="fa84d0a785a71b27a60c233495af9bd072a09d154ac83d48fcbd3f10b918817d" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.122920 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa84d0a785a71b27a60c233495af9bd072a09d154ac83d48fcbd3f10b918817d"} err="failed to get container status \"fa84d0a785a71b27a60c233495af9bd072a09d154ac83d48fcbd3f10b918817d\": rpc error: code = NotFound desc = could not find container \"fa84d0a785a71b27a60c233495af9bd072a09d154ac83d48fcbd3f10b918817d\": container with ID starting with fa84d0a785a71b27a60c233495af9bd072a09d154ac83d48fcbd3f10b918817d not found: ID does not exist" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.176931 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0189f310-62ee-4f4d-b618-5afac393ff30-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.177001 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0189f310-62ee-4f4d-b618-5afac393ff30-logs\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.177023 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0189f310-62ee-4f4d-b618-5afac393ff30-public-tls-certs\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.177081 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0189f310-62ee-4f4d-b618-5afac393ff30-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.177106 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0189f310-62ee-4f4d-b618-5afac393ff30-scripts\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.177137 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0189f310-62ee-4f4d-b618-5afac393ff30-config-data-custom\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.177157 4838 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0189f310-62ee-4f4d-b618-5afac393ff30-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.177174 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0189f310-62ee-4f4d-b618-5afac393ff30-config-data\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.177215 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khx94\" (UniqueName: \"kubernetes.io/projected/0189f310-62ee-4f4d-b618-5afac393ff30-kube-api-access-khx94\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.278920 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khx94\" (UniqueName: \"kubernetes.io/projected/0189f310-62ee-4f4d-b618-5afac393ff30-kube-api-access-khx94\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.278993 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0189f310-62ee-4f4d-b618-5afac393ff30-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.279043 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0189f310-62ee-4f4d-b618-5afac393ff30-logs\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.279065 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0189f310-62ee-4f4d-b618-5afac393ff30-public-tls-certs\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.279135 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0189f310-62ee-4f4d-b618-5afac393ff30-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.279168 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0189f310-62ee-4f4d-b618-5afac393ff30-scripts\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.279203 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0189f310-62ee-4f4d-b618-5afac393ff30-config-data-custom\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " 
pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.279232 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0189f310-62ee-4f4d-b618-5afac393ff30-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.279255 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0189f310-62ee-4f4d-b618-5afac393ff30-config-data\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.279480 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0189f310-62ee-4f4d-b618-5afac393ff30-logs\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.279530 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0189f310-62ee-4f4d-b618-5afac393ff30-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.289160 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0189f310-62ee-4f4d-b618-5afac393ff30-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.289300 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0189f310-62ee-4f4d-b618-5afac393ff30-config-data-custom\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.289470 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0189f310-62ee-4f4d-b618-5afac393ff30-scripts\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.289478 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0189f310-62ee-4f4d-b618-5afac393ff30-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.289525 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0189f310-62ee-4f4d-b618-5afac393ff30-public-tls-certs\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.289886 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0189f310-62ee-4f4d-b618-5afac393ff30-config-data\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 
11:17:31.295386 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khx94\" (UniqueName: \"kubernetes.io/projected/0189f310-62ee-4f4d-b618-5afac393ff30-kube-api-access-khx94\") pod \"cinder-api-0\" (UID: \"0189f310-62ee-4f4d-b618-5afac393ff30\") " pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.420784 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.969512 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a","Type":"ContainerStarted","Data":"0c585d2ff6d5547c5f8707944e66e18611cbc738e11e63f0456f365ef723c9db"} Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.969861 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a","Type":"ContainerStarted","Data":"1df40958ee71ac9b856e1f84562bb116aa7cc554747b89a932e8d2f4ddca1b7d"} Feb 02 11:17:31 crc kubenswrapper[4838]: I0202 11:17:31.992577 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.043518847 podStartE2EDuration="5.992557607s" podCreationTimestamp="2026-02-02 11:17:26 +0000 UTC" firstStartedPulling="2026-02-02 11:17:27.028184395 +0000 UTC m=+1441.365285423" lastFinishedPulling="2026-02-02 11:17:29.977223135 +0000 UTC m=+1444.314324183" observedRunningTime="2026-02-02 11:17:31.990577545 +0000 UTC m=+1446.327678583" watchObservedRunningTime="2026-02-02 11:17:31.992557607 +0000 UTC m=+1446.329658635" Feb 02 11:17:32 crc kubenswrapper[4838]: I0202 11:17:32.055121 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 02 11:17:32 crc kubenswrapper[4838]: I0202 11:17:32.520318 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9a412e8-72a9-4e5b-a609-04551277d08c" path="/var/lib/kubelet/pods/f9a412e8-72a9-4e5b-a609-04551277d08c/volumes" Feb 02 11:17:32 crc kubenswrapper[4838]: I0202 11:17:32.982690 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0189f310-62ee-4f4d-b618-5afac393ff30","Type":"ContainerStarted","Data":"a78b82adeb5ea27eccdc9e6af788d0690fda572e255384ccd5d467d1c0b81710"} Feb 02 11:17:32 crc kubenswrapper[4838]: I0202 11:17:32.983439 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0189f310-62ee-4f4d-b618-5afac393ff30","Type":"ContainerStarted","Data":"fdb4f9ed7707f8f7c770ccf8240f87c30aca035ec7baefe5ece5998b1e74c29d"} Feb 02 11:17:33 crc kubenswrapper[4838]: I0202 11:17:33.993665 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0189f310-62ee-4f4d-b618-5afac393ff30","Type":"ContainerStarted","Data":"091d8365d77499c2b7bb65414d38dfcb91f01b3ca64ce4e5e6d9386ee0320c16"} Feb 02 11:17:33 crc kubenswrapper[4838]: I0202 11:17:33.994766 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Feb 02 11:17:34 crc kubenswrapper[4838]: I0202 11:17:34.028018 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.027997191 podStartE2EDuration="4.027997191s" podCreationTimestamp="2026-02-02 11:17:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-02-02 11:17:34.0188683 +0000 UTC m=+1448.355969328" watchObservedRunningTime="2026-02-02 11:17:34.027997191 +0000 UTC m=+1448.365098219" Feb 02 11:17:36 crc kubenswrapper[4838]: I0202 11:17:36.518102 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Feb 02 11:17:36 crc kubenswrapper[4838]: I0202 11:17:36.597806 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-795f4db4bc-ls245" Feb 02 11:17:36 crc kubenswrapper[4838]: I0202 11:17:36.653881 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-586bdc5f9-z48qj"] Feb 02 11:17:36 crc kubenswrapper[4838]: I0202 11:17:36.654138 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-586bdc5f9-z48qj" podUID="182e6c68-3df4-42a6-ba33-e5b7ffbe8f40" containerName="dnsmasq-dns" containerID="cri-o://97e346f34790565e1b0bc34bcc83af1a6aed322f148ac649e8f245aba9672915" gracePeriod=10 Feb 02 11:17:36 crc kubenswrapper[4838]: I0202 11:17:36.910309 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Feb 02 11:17:37 crc kubenswrapper[4838]: I0202 11:17:37.027261 4838 generic.go:334] "Generic (PLEG): container finished" podID="182e6c68-3df4-42a6-ba33-e5b7ffbe8f40" containerID="97e346f34790565e1b0bc34bcc83af1a6aed322f148ac649e8f245aba9672915" exitCode=0 Feb 02 11:17:37 crc kubenswrapper[4838]: I0202 11:17:37.027436 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586bdc5f9-z48qj" event={"ID":"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40","Type":"ContainerDied","Data":"97e346f34790565e1b0bc34bcc83af1a6aed322f148ac649e8f245aba9672915"} Feb 02 11:17:37 crc kubenswrapper[4838]: I0202 11:17:37.072462 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 11:17:37 crc kubenswrapper[4838]: I0202 11:17:37.327031 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-586bdc5f9-z48qj" Feb 02 11:17:37 crc kubenswrapper[4838]: I0202 11:17:37.414551 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-ovsdbserver-sb\") pod \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\" (UID: \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\") " Feb 02 11:17:37 crc kubenswrapper[4838]: I0202 11:17:37.414666 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-dns-svc\") pod \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\" (UID: \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\") " Feb 02 11:17:37 crc kubenswrapper[4838]: I0202 11:17:37.414698 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-dns-swift-storage-0\") pod \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\" (UID: \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\") " Feb 02 11:17:37 crc kubenswrapper[4838]: I0202 11:17:37.414818 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-config\") pod \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\" (UID: \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\") " Feb 02 11:17:37 crc kubenswrapper[4838]: I0202 11:17:37.414847 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xfjk2\" (UniqueName: \"kubernetes.io/projected/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-kube-api-access-xfjk2\") pod \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\" (UID: \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\") " Feb 02 11:17:37 crc kubenswrapper[4838]: I0202 11:17:37.414881 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-ovsdbserver-nb\") pod \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\" (UID: \"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40\") " Feb 02 11:17:37 crc kubenswrapper[4838]: I0202 11:17:37.422189 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-kube-api-access-xfjk2" (OuterVolumeSpecName: "kube-api-access-xfjk2") pod "182e6c68-3df4-42a6-ba33-e5b7ffbe8f40" (UID: "182e6c68-3df4-42a6-ba33-e5b7ffbe8f40"). InnerVolumeSpecName "kube-api-access-xfjk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:17:37 crc kubenswrapper[4838]: I0202 11:17:37.472427 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "182e6c68-3df4-42a6-ba33-e5b7ffbe8f40" (UID: "182e6c68-3df4-42a6-ba33-e5b7ffbe8f40"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:17:37 crc kubenswrapper[4838]: I0202 11:17:37.472735 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "182e6c68-3df4-42a6-ba33-e5b7ffbe8f40" (UID: "182e6c68-3df4-42a6-ba33-e5b7ffbe8f40"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:17:37 crc kubenswrapper[4838]: I0202 11:17:37.475893 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "182e6c68-3df4-42a6-ba33-e5b7ffbe8f40" (UID: "182e6c68-3df4-42a6-ba33-e5b7ffbe8f40"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:17:37 crc kubenswrapper[4838]: I0202 11:17:37.477169 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-config" (OuterVolumeSpecName: "config") pod "182e6c68-3df4-42a6-ba33-e5b7ffbe8f40" (UID: "182e6c68-3df4-42a6-ba33-e5b7ffbe8f40"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:17:37 crc kubenswrapper[4838]: I0202 11:17:37.481030 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "182e6c68-3df4-42a6-ba33-e5b7ffbe8f40" (UID: "182e6c68-3df4-42a6-ba33-e5b7ffbe8f40"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:17:37 crc kubenswrapper[4838]: I0202 11:17:37.519252 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-config\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:37 crc kubenswrapper[4838]: I0202 11:17:37.519331 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xfjk2\" (UniqueName: \"kubernetes.io/projected/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-kube-api-access-xfjk2\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:37 crc kubenswrapper[4838]: I0202 11:17:37.519347 4838 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:37 crc kubenswrapper[4838]: I0202 11:17:37.519379 4838 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:37 crc kubenswrapper[4838]: I0202 11:17:37.519392 4838 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:37 crc kubenswrapper[4838]: I0202 11:17:37.519406 4838 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:38 crc kubenswrapper[4838]: I0202 11:17:38.056044 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586bdc5f9-z48qj" event={"ID":"182e6c68-3df4-42a6-ba33-e5b7ffbe8f40","Type":"ContainerDied","Data":"a5de07ad9dde56678bf71535ce8d3631424be7e19eb08c8a323649a2383f590c"} Feb 02 11:17:38 crc kubenswrapper[4838]: I0202 11:17:38.056120 4838 scope.go:117] "RemoveContainer" containerID="97e346f34790565e1b0bc34bcc83af1a6aed322f148ac649e8f245aba9672915" Feb 02 11:17:38 crc kubenswrapper[4838]: I0202 11:17:38.056134 4838 util.go:48] "No ready sandbox for 
pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-586bdc5f9-z48qj" Feb 02 11:17:38 crc kubenswrapper[4838]: I0202 11:17:38.056348 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="cda9be5c-b29c-4ae7-93fc-3a1d6d98415a" containerName="probe" containerID="cri-o://0c585d2ff6d5547c5f8707944e66e18611cbc738e11e63f0456f365ef723c9db" gracePeriod=30 Feb 02 11:17:38 crc kubenswrapper[4838]: I0202 11:17:38.056312 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="cda9be5c-b29c-4ae7-93fc-3a1d6d98415a" containerName="cinder-scheduler" containerID="cri-o://1df40958ee71ac9b856e1f84562bb116aa7cc554747b89a932e8d2f4ddca1b7d" gracePeriod=30 Feb 02 11:17:38 crc kubenswrapper[4838]: I0202 11:17:38.086900 4838 scope.go:117] "RemoveContainer" containerID="9c6cc28d011a5de1d1e3cd3db4675ae585ba4453035f010600b5c6580fe47653" Feb 02 11:17:38 crc kubenswrapper[4838]: I0202 11:17:38.106564 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-586bdc5f9-z48qj"] Feb 02 11:17:38 crc kubenswrapper[4838]: I0202 11:17:38.113943 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-586bdc5f9-z48qj"] Feb 02 11:17:38 crc kubenswrapper[4838]: I0202 11:17:38.518740 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="182e6c68-3df4-42a6-ba33-e5b7ffbe8f40" path="/var/lib/kubelet/pods/182e6c68-3df4-42a6-ba33-e5b7ffbe8f40/volumes" Feb 02 11:17:40 crc kubenswrapper[4838]: I0202 11:17:40.084505 4838 generic.go:334] "Generic (PLEG): container finished" podID="cda9be5c-b29c-4ae7-93fc-3a1d6d98415a" containerID="0c585d2ff6d5547c5f8707944e66e18611cbc738e11e63f0456f365ef723c9db" exitCode=0 Feb 02 11:17:40 crc kubenswrapper[4838]: I0202 11:17:40.085253 4838 generic.go:334] "Generic (PLEG): container finished" podID="cda9be5c-b29c-4ae7-93fc-3a1d6d98415a" containerID="1df40958ee71ac9b856e1f84562bb116aa7cc554747b89a932e8d2f4ddca1b7d" exitCode=0 Feb 02 11:17:40 crc kubenswrapper[4838]: I0202 11:17:40.084737 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a","Type":"ContainerDied","Data":"0c585d2ff6d5547c5f8707944e66e18611cbc738e11e63f0456f365ef723c9db"} Feb 02 11:17:40 crc kubenswrapper[4838]: I0202 11:17:40.085317 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a","Type":"ContainerDied","Data":"1df40958ee71ac9b856e1f84562bb116aa7cc554747b89a932e8d2f4ddca1b7d"} Feb 02 11:17:40 crc kubenswrapper[4838]: I0202 11:17:40.237903 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 02 11:17:40 crc kubenswrapper[4838]: I0202 11:17:40.368756 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-config-data\") pod \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\" (UID: \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\") " Feb 02 11:17:40 crc kubenswrapper[4838]: I0202 11:17:40.368983 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-etc-machine-id\") pod \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\" (UID: \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\") " Feb 02 11:17:40 crc kubenswrapper[4838]: I0202 11:17:40.369010 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-combined-ca-bundle\") pod \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\" (UID: \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\") " Feb 02 11:17:40 crc kubenswrapper[4838]: I0202 11:17:40.369055 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-scripts\") pod \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\" (UID: \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\") " Feb 02 11:17:40 crc kubenswrapper[4838]: I0202 11:17:40.369077 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-config-data-custom\") pod \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\" (UID: \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\") " Feb 02 11:17:40 crc kubenswrapper[4838]: I0202 11:17:40.369106 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bfg25\" (UniqueName: \"kubernetes.io/projected/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-kube-api-access-bfg25\") pod \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\" (UID: \"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a\") " Feb 02 11:17:40 crc kubenswrapper[4838]: I0202 11:17:40.369843 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "cda9be5c-b29c-4ae7-93fc-3a1d6d98415a" (UID: "cda9be5c-b29c-4ae7-93fc-3a1d6d98415a"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 11:17:40 crc kubenswrapper[4838]: I0202 11:17:40.370183 4838 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:40 crc kubenswrapper[4838]: I0202 11:17:40.375005 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-kube-api-access-bfg25" (OuterVolumeSpecName: "kube-api-access-bfg25") pod "cda9be5c-b29c-4ae7-93fc-3a1d6d98415a" (UID: "cda9be5c-b29c-4ae7-93fc-3a1d6d98415a"). InnerVolumeSpecName "kube-api-access-bfg25". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:17:40 crc kubenswrapper[4838]: I0202 11:17:40.377434 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-scripts" (OuterVolumeSpecName: "scripts") pod "cda9be5c-b29c-4ae7-93fc-3a1d6d98415a" (UID: "cda9be5c-b29c-4ae7-93fc-3a1d6d98415a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:17:40 crc kubenswrapper[4838]: I0202 11:17:40.380807 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "cda9be5c-b29c-4ae7-93fc-3a1d6d98415a" (UID: "cda9be5c-b29c-4ae7-93fc-3a1d6d98415a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:17:40 crc kubenswrapper[4838]: I0202 11:17:40.449780 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cda9be5c-b29c-4ae7-93fc-3a1d6d98415a" (UID: "cda9be5c-b29c-4ae7-93fc-3a1d6d98415a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:17:40 crc kubenswrapper[4838]: I0202 11:17:40.472422 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:40 crc kubenswrapper[4838]: I0202 11:17:40.472672 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:40 crc kubenswrapper[4838]: I0202 11:17:40.472764 4838 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:40 crc kubenswrapper[4838]: I0202 11:17:40.472848 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bfg25\" (UniqueName: \"kubernetes.io/projected/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-kube-api-access-bfg25\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:40 crc kubenswrapper[4838]: I0202 11:17:40.482392 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-config-data" (OuterVolumeSpecName: "config-data") pod "cda9be5c-b29c-4ae7-93fc-3a1d6d98415a" (UID: "cda9be5c-b29c-4ae7-93fc-3a1d6d98415a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:17:40 crc kubenswrapper[4838]: I0202 11:17:40.574964 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.097415 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"cda9be5c-b29c-4ae7-93fc-3a1d6d98415a","Type":"ContainerDied","Data":"be40cd6c86cfe6d5f224c049d7a914b866f0c228f7188d6100f6a832fea6670e"} Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.097463 4838 scope.go:117] "RemoveContainer" containerID="0c585d2ff6d5547c5f8707944e66e18611cbc738e11e63f0456f365ef723c9db" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.097485 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.124456 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.124582 4838 scope.go:117] "RemoveContainer" containerID="1df40958ee71ac9b856e1f84562bb116aa7cc554747b89a932e8d2f4ddca1b7d" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.142538 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.156005 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 11:17:41 crc kubenswrapper[4838]: E0202 11:17:41.156415 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="182e6c68-3df4-42a6-ba33-e5b7ffbe8f40" containerName="dnsmasq-dns" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.156432 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="182e6c68-3df4-42a6-ba33-e5b7ffbe8f40" containerName="dnsmasq-dns" Feb 02 11:17:41 crc kubenswrapper[4838]: E0202 11:17:41.156443 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cda9be5c-b29c-4ae7-93fc-3a1d6d98415a" containerName="probe" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.156449 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="cda9be5c-b29c-4ae7-93fc-3a1d6d98415a" containerName="probe" Feb 02 11:17:41 crc kubenswrapper[4838]: E0202 11:17:41.156465 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="182e6c68-3df4-42a6-ba33-e5b7ffbe8f40" containerName="init" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.156473 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="182e6c68-3df4-42a6-ba33-e5b7ffbe8f40" containerName="init" Feb 02 11:17:41 crc kubenswrapper[4838]: E0202 11:17:41.156491 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cda9be5c-b29c-4ae7-93fc-3a1d6d98415a" containerName="cinder-scheduler" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.156497 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="cda9be5c-b29c-4ae7-93fc-3a1d6d98415a" containerName="cinder-scheduler" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.156693 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="cda9be5c-b29c-4ae7-93fc-3a1d6d98415a" containerName="cinder-scheduler" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.156719 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="182e6c68-3df4-42a6-ba33-e5b7ffbe8f40" containerName="dnsmasq-dns" 
Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.156730 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="cda9be5c-b29c-4ae7-93fc-3a1d6d98415a" containerName="probe" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.157706 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.161252 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.169090 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.289113 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vs4h\" (UniqueName: \"kubernetes.io/projected/2c000131-c578-473f-8758-95ae23e12d3a-kube-api-access-7vs4h\") pod \"cinder-scheduler-0\" (UID: \"2c000131-c578-473f-8758-95ae23e12d3a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.289186 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c000131-c578-473f-8758-95ae23e12d3a-config-data\") pod \"cinder-scheduler-0\" (UID: \"2c000131-c578-473f-8758-95ae23e12d3a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.289215 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c000131-c578-473f-8758-95ae23e12d3a-scripts\") pod \"cinder-scheduler-0\" (UID: \"2c000131-c578-473f-8758-95ae23e12d3a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.289233 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2c000131-c578-473f-8758-95ae23e12d3a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"2c000131-c578-473f-8758-95ae23e12d3a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.289298 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c000131-c578-473f-8758-95ae23e12d3a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"2c000131-c578-473f-8758-95ae23e12d3a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.289321 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2c000131-c578-473f-8758-95ae23e12d3a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"2c000131-c578-473f-8758-95ae23e12d3a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.390550 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c000131-c578-473f-8758-95ae23e12d3a-config-data\") pod \"cinder-scheduler-0\" (UID: \"2c000131-c578-473f-8758-95ae23e12d3a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.390639 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/2c000131-c578-473f-8758-95ae23e12d3a-scripts\") pod \"cinder-scheduler-0\" (UID: \"2c000131-c578-473f-8758-95ae23e12d3a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.390711 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2c000131-c578-473f-8758-95ae23e12d3a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"2c000131-c578-473f-8758-95ae23e12d3a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.390909 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2c000131-c578-473f-8758-95ae23e12d3a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"2c000131-c578-473f-8758-95ae23e12d3a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.390837 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c000131-c578-473f-8758-95ae23e12d3a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"2c000131-c578-473f-8758-95ae23e12d3a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.391080 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2c000131-c578-473f-8758-95ae23e12d3a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"2c000131-c578-473f-8758-95ae23e12d3a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.391230 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vs4h\" (UniqueName: \"kubernetes.io/projected/2c000131-c578-473f-8758-95ae23e12d3a-kube-api-access-7vs4h\") pod \"cinder-scheduler-0\" (UID: \"2c000131-c578-473f-8758-95ae23e12d3a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.407035 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c000131-c578-473f-8758-95ae23e12d3a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"2c000131-c578-473f-8758-95ae23e12d3a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.407302 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c000131-c578-473f-8758-95ae23e12d3a-config-data\") pod \"cinder-scheduler-0\" (UID: \"2c000131-c578-473f-8758-95ae23e12d3a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.407441 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c000131-c578-473f-8758-95ae23e12d3a-scripts\") pod \"cinder-scheduler-0\" (UID: \"2c000131-c578-473f-8758-95ae23e12d3a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.407940 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2c000131-c578-473f-8758-95ae23e12d3a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"2c000131-c578-473f-8758-95ae23e12d3a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.409379 4838 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vs4h\" (UniqueName: \"kubernetes.io/projected/2c000131-c578-473f-8758-95ae23e12d3a-kube-api-access-7vs4h\") pod \"cinder-scheduler-0\" (UID: \"2c000131-c578-473f-8758-95ae23e12d3a\") " pod="openstack/cinder-scheduler-0" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.504894 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 02 11:17:41 crc kubenswrapper[4838]: I0202 11:17:41.962888 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 11:17:41 crc kubenswrapper[4838]: W0202 11:17:41.966830 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2c000131_c578_473f_8758_95ae23e12d3a.slice/crio-358aed3fb468f0f7fd89b126463d7dfd2ab6d28aa62a6730fd7197f0cce2f799 WatchSource:0}: Error finding container 358aed3fb468f0f7fd89b126463d7dfd2ab6d28aa62a6730fd7197f0cce2f799: Status 404 returned error can't find the container with id 358aed3fb468f0f7fd89b126463d7dfd2ab6d28aa62a6730fd7197f0cce2f799 Feb 02 11:17:42 crc kubenswrapper[4838]: I0202 11:17:42.110675 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2c000131-c578-473f-8758-95ae23e12d3a","Type":"ContainerStarted","Data":"358aed3fb468f0f7fd89b126463d7dfd2ab6d28aa62a6730fd7197f0cce2f799"} Feb 02 11:17:42 crc kubenswrapper[4838]: I0202 11:17:42.517456 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cda9be5c-b29c-4ae7-93fc-3a1d6d98415a" path="/var/lib/kubelet/pods/cda9be5c-b29c-4ae7-93fc-3a1d6d98415a/volumes" Feb 02 11:17:43 crc kubenswrapper[4838]: I0202 11:17:43.093788 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Feb 02 11:17:43 crc kubenswrapper[4838]: I0202 11:17:43.141732 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2c000131-c578-473f-8758-95ae23e12d3a","Type":"ContainerStarted","Data":"32a47935ebd2178a03324bb6eb3d09da089a8c8f8013c6fd1c3346aec9a11802"} Feb 02 11:17:43 crc kubenswrapper[4838]: I0202 11:17:43.620420 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Feb 02 11:17:44 crc kubenswrapper[4838]: I0202 11:17:44.151787 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2c000131-c578-473f-8758-95ae23e12d3a","Type":"ContainerStarted","Data":"20c3944a4ef4413eb0d41c78335630aca829880eb407479ee608caf6a50f2933"} Feb 02 11:17:44 crc kubenswrapper[4838]: I0202 11:17:44.176843 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.176825665 podStartE2EDuration="3.176825665s" podCreationTimestamp="2026-02-02 11:17:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:17:44.170878138 +0000 UTC m=+1458.507979186" watchObservedRunningTime="2026-02-02 11:17:44.176825665 +0000 UTC m=+1458.513926693" Feb 02 11:17:44 crc kubenswrapper[4838]: I0202 11:17:44.799479 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:17:44 crc kubenswrapper[4838]: I0202 11:17:44.799745 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" 
podUID="4d3847cc-dec5-4288-9404-f55d46551d3b" containerName="ceilometer-central-agent" containerID="cri-o://a34d621cce8ea7a9cc036344dcfc1640acb49e269bec9ef7210f9133c0c20d37" gracePeriod=30 Feb 02 11:17:44 crc kubenswrapper[4838]: I0202 11:17:44.799832 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4d3847cc-dec5-4288-9404-f55d46551d3b" containerName="sg-core" containerID="cri-o://5cd43ccbc62626140f99f579266a435ba7800794b615ede07dcc2ad86188d33a" gracePeriod=30 Feb 02 11:17:44 crc kubenswrapper[4838]: I0202 11:17:44.799884 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4d3847cc-dec5-4288-9404-f55d46551d3b" containerName="ceilometer-notification-agent" containerID="cri-o://639f7870ea82c3ab973fd106afad9e967702bca07ebcc115f428b525da7f500c" gracePeriod=30 Feb 02 11:17:44 crc kubenswrapper[4838]: I0202 11:17:44.799896 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4d3847cc-dec5-4288-9404-f55d46551d3b" containerName="proxy-httpd" containerID="cri-o://35ffb794e2bbaf2a8b4908b75293dc8a32fea3a1b85f4d7f08cd57c293ae310a" gracePeriod=30 Feb 02 11:17:45 crc kubenswrapper[4838]: I0202 11:17:45.166307 4838 generic.go:334] "Generic (PLEG): container finished" podID="4d3847cc-dec5-4288-9404-f55d46551d3b" containerID="35ffb794e2bbaf2a8b4908b75293dc8a32fea3a1b85f4d7f08cd57c293ae310a" exitCode=0 Feb 02 11:17:45 crc kubenswrapper[4838]: I0202 11:17:45.166637 4838 generic.go:334] "Generic (PLEG): container finished" podID="4d3847cc-dec5-4288-9404-f55d46551d3b" containerID="5cd43ccbc62626140f99f579266a435ba7800794b615ede07dcc2ad86188d33a" exitCode=2 Feb 02 11:17:45 crc kubenswrapper[4838]: I0202 11:17:45.166384 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4d3847cc-dec5-4288-9404-f55d46551d3b","Type":"ContainerDied","Data":"35ffb794e2bbaf2a8b4908b75293dc8a32fea3a1b85f4d7f08cd57c293ae310a"} Feb 02 11:17:45 crc kubenswrapper[4838]: I0202 11:17:45.166691 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4d3847cc-dec5-4288-9404-f55d46551d3b","Type":"ContainerDied","Data":"5cd43ccbc62626140f99f579266a435ba7800794b615ede07dcc2ad86188d33a"} Feb 02 11:17:45 crc kubenswrapper[4838]: I0202 11:17:45.539655 4838 scope.go:117] "RemoveContainer" containerID="032cee7f2352b3814216c51d96d3676f339a080dbc8bc6e165bf6b82886d612e" Feb 02 11:17:45 crc kubenswrapper[4838]: I0202 11:17:45.572014 4838 scope.go:117] "RemoveContainer" containerID="ee21caf870be62380ef54f8dab0bbebb35d85ef7d0f39eeb969c057a9fbad71f" Feb 02 11:17:46 crc kubenswrapper[4838]: I0202 11:17:46.177331 4838 generic.go:334] "Generic (PLEG): container finished" podID="4d3847cc-dec5-4288-9404-f55d46551d3b" containerID="a34d621cce8ea7a9cc036344dcfc1640acb49e269bec9ef7210f9133c0c20d37" exitCode=0 Feb 02 11:17:46 crc kubenswrapper[4838]: I0202 11:17:46.177395 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4d3847cc-dec5-4288-9404-f55d46551d3b","Type":"ContainerDied","Data":"a34d621cce8ea7a9cc036344dcfc1640acb49e269bec9ef7210f9133c0c20d37"} Feb 02 11:17:46 crc kubenswrapper[4838]: I0202 11:17:46.522722 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.183776 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.220505 4838 generic.go:334] "Generic (PLEG): container finished" podID="4d3847cc-dec5-4288-9404-f55d46551d3b" containerID="639f7870ea82c3ab973fd106afad9e967702bca07ebcc115f428b525da7f500c" exitCode=0 Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.220645 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.220880 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4d3847cc-dec5-4288-9404-f55d46551d3b","Type":"ContainerDied","Data":"639f7870ea82c3ab973fd106afad9e967702bca07ebcc115f428b525da7f500c"} Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.221009 4838 scope.go:117] "RemoveContainer" containerID="35ffb794e2bbaf2a8b4908b75293dc8a32fea3a1b85f4d7f08cd57c293ae310a" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.221140 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4d3847cc-dec5-4288-9404-f55d46551d3b","Type":"ContainerDied","Data":"2b7cc56b124b16c775423101436ce99919089e503a50b7f4ce56629122aa8a20"} Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.246538 4838 scope.go:117] "RemoveContainer" containerID="5cd43ccbc62626140f99f579266a435ba7800794b615ede07dcc2ad86188d33a" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.273647 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4d3847cc-dec5-4288-9404-f55d46551d3b-run-httpd\") pod \"4d3847cc-dec5-4288-9404-f55d46551d3b\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.273771 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-sg-core-conf-yaml\") pod \"4d3847cc-dec5-4288-9404-f55d46551d3b\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.273825 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-combined-ca-bundle\") pod \"4d3847cc-dec5-4288-9404-f55d46551d3b\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.273848 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-ceilometer-tls-certs\") pod \"4d3847cc-dec5-4288-9404-f55d46551d3b\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.273911 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4d3847cc-dec5-4288-9404-f55d46551d3b-log-httpd\") pod \"4d3847cc-dec5-4288-9404-f55d46551d3b\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.273945 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-config-data\") pod \"4d3847cc-dec5-4288-9404-f55d46551d3b\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " Feb 02 
11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.274058 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-scripts\") pod \"4d3847cc-dec5-4288-9404-f55d46551d3b\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.274101 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qpbmp\" (UniqueName: \"kubernetes.io/projected/4d3847cc-dec5-4288-9404-f55d46551d3b-kube-api-access-qpbmp\") pod \"4d3847cc-dec5-4288-9404-f55d46551d3b\" (UID: \"4d3847cc-dec5-4288-9404-f55d46551d3b\") " Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.284090 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d3847cc-dec5-4288-9404-f55d46551d3b-kube-api-access-qpbmp" (OuterVolumeSpecName: "kube-api-access-qpbmp") pod "4d3847cc-dec5-4288-9404-f55d46551d3b" (UID: "4d3847cc-dec5-4288-9404-f55d46551d3b"). InnerVolumeSpecName "kube-api-access-qpbmp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.284739 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d3847cc-dec5-4288-9404-f55d46551d3b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "4d3847cc-dec5-4288-9404-f55d46551d3b" (UID: "4d3847cc-dec5-4288-9404-f55d46551d3b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.286068 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d3847cc-dec5-4288-9404-f55d46551d3b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "4d3847cc-dec5-4288-9404-f55d46551d3b" (UID: "4d3847cc-dec5-4288-9404-f55d46551d3b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.299180 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-scripts" (OuterVolumeSpecName: "scripts") pod "4d3847cc-dec5-4288-9404-f55d46551d3b" (UID: "4d3847cc-dec5-4288-9404-f55d46551d3b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.318815 4838 scope.go:117] "RemoveContainer" containerID="639f7870ea82c3ab973fd106afad9e967702bca07ebcc115f428b525da7f500c" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.325200 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "4d3847cc-dec5-4288-9404-f55d46551d3b" (UID: "4d3847cc-dec5-4288-9404-f55d46551d3b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.346341 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "4d3847cc-dec5-4288-9404-f55d46551d3b" (UID: "4d3847cc-dec5-4288-9404-f55d46551d3b"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.376503 4838 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4d3847cc-dec5-4288-9404-f55d46551d3b-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.376669 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.376684 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qpbmp\" (UniqueName: \"kubernetes.io/projected/4d3847cc-dec5-4288-9404-f55d46551d3b-kube-api-access-qpbmp\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.376699 4838 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4d3847cc-dec5-4288-9404-f55d46551d3b-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.376710 4838 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.376720 4838 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.381513 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4d3847cc-dec5-4288-9404-f55d46551d3b" (UID: "4d3847cc-dec5-4288-9404-f55d46551d3b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.399453 4838 scope.go:117] "RemoveContainer" containerID="a34d621cce8ea7a9cc036344dcfc1640acb49e269bec9ef7210f9133c0c20d37" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.424581 4838 scope.go:117] "RemoveContainer" containerID="35ffb794e2bbaf2a8b4908b75293dc8a32fea3a1b85f4d7f08cd57c293ae310a" Feb 02 11:17:50 crc kubenswrapper[4838]: E0202 11:17:50.425244 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"35ffb794e2bbaf2a8b4908b75293dc8a32fea3a1b85f4d7f08cd57c293ae310a\": container with ID starting with 35ffb794e2bbaf2a8b4908b75293dc8a32fea3a1b85f4d7f08cd57c293ae310a not found: ID does not exist" containerID="35ffb794e2bbaf2a8b4908b75293dc8a32fea3a1b85f4d7f08cd57c293ae310a" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.425289 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35ffb794e2bbaf2a8b4908b75293dc8a32fea3a1b85f4d7f08cd57c293ae310a"} err="failed to get container status \"35ffb794e2bbaf2a8b4908b75293dc8a32fea3a1b85f4d7f08cd57c293ae310a\": rpc error: code = NotFound desc = could not find container \"35ffb794e2bbaf2a8b4908b75293dc8a32fea3a1b85f4d7f08cd57c293ae310a\": container with ID starting with 35ffb794e2bbaf2a8b4908b75293dc8a32fea3a1b85f4d7f08cd57c293ae310a not found: ID does not exist" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.425322 4838 scope.go:117] "RemoveContainer" containerID="5cd43ccbc62626140f99f579266a435ba7800794b615ede07dcc2ad86188d33a" Feb 02 11:17:50 crc kubenswrapper[4838]: E0202 11:17:50.425774 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5cd43ccbc62626140f99f579266a435ba7800794b615ede07dcc2ad86188d33a\": container with ID starting with 5cd43ccbc62626140f99f579266a435ba7800794b615ede07dcc2ad86188d33a not found: ID does not exist" containerID="5cd43ccbc62626140f99f579266a435ba7800794b615ede07dcc2ad86188d33a" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.425825 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5cd43ccbc62626140f99f579266a435ba7800794b615ede07dcc2ad86188d33a"} err="failed to get container status \"5cd43ccbc62626140f99f579266a435ba7800794b615ede07dcc2ad86188d33a\": rpc error: code = NotFound desc = could not find container \"5cd43ccbc62626140f99f579266a435ba7800794b615ede07dcc2ad86188d33a\": container with ID starting with 5cd43ccbc62626140f99f579266a435ba7800794b615ede07dcc2ad86188d33a not found: ID does not exist" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.425862 4838 scope.go:117] "RemoveContainer" containerID="639f7870ea82c3ab973fd106afad9e967702bca07ebcc115f428b525da7f500c" Feb 02 11:17:50 crc kubenswrapper[4838]: E0202 11:17:50.426196 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"639f7870ea82c3ab973fd106afad9e967702bca07ebcc115f428b525da7f500c\": container with ID starting with 639f7870ea82c3ab973fd106afad9e967702bca07ebcc115f428b525da7f500c not found: ID does not exist" containerID="639f7870ea82c3ab973fd106afad9e967702bca07ebcc115f428b525da7f500c" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.426230 4838 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"639f7870ea82c3ab973fd106afad9e967702bca07ebcc115f428b525da7f500c"} err="failed to get container status \"639f7870ea82c3ab973fd106afad9e967702bca07ebcc115f428b525da7f500c\": rpc error: code = NotFound desc = could not find container \"639f7870ea82c3ab973fd106afad9e967702bca07ebcc115f428b525da7f500c\": container with ID starting with 639f7870ea82c3ab973fd106afad9e967702bca07ebcc115f428b525da7f500c not found: ID does not exist" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.426246 4838 scope.go:117] "RemoveContainer" containerID="a34d621cce8ea7a9cc036344dcfc1640acb49e269bec9ef7210f9133c0c20d37" Feb 02 11:17:50 crc kubenswrapper[4838]: E0202 11:17:50.426537 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a34d621cce8ea7a9cc036344dcfc1640acb49e269bec9ef7210f9133c0c20d37\": container with ID starting with a34d621cce8ea7a9cc036344dcfc1640acb49e269bec9ef7210f9133c0c20d37 not found: ID does not exist" containerID="a34d621cce8ea7a9cc036344dcfc1640acb49e269bec9ef7210f9133c0c20d37" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.426564 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a34d621cce8ea7a9cc036344dcfc1640acb49e269bec9ef7210f9133c0c20d37"} err="failed to get container status \"a34d621cce8ea7a9cc036344dcfc1640acb49e269bec9ef7210f9133c0c20d37\": rpc error: code = NotFound desc = could not find container \"a34d621cce8ea7a9cc036344dcfc1640acb49e269bec9ef7210f9133c0c20d37\": container with ID starting with a34d621cce8ea7a9cc036344dcfc1640acb49e269bec9ef7210f9133c0c20d37 not found: ID does not exist" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.430830 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-config-data" (OuterVolumeSpecName: "config-data") pod "4d3847cc-dec5-4288-9404-f55d46551d3b" (UID: "4d3847cc-dec5-4288-9404-f55d46551d3b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.478709 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.478763 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d3847cc-dec5-4288-9404-f55d46551d3b-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.556810 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.568985 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.584901 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:17:50 crc kubenswrapper[4838]: E0202 11:17:50.585362 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d3847cc-dec5-4288-9404-f55d46551d3b" containerName="sg-core" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.585391 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d3847cc-dec5-4288-9404-f55d46551d3b" containerName="sg-core" Feb 02 11:17:50 crc kubenswrapper[4838]: E0202 11:17:50.585418 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d3847cc-dec5-4288-9404-f55d46551d3b" containerName="proxy-httpd" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.585427 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d3847cc-dec5-4288-9404-f55d46551d3b" containerName="proxy-httpd" Feb 02 11:17:50 crc kubenswrapper[4838]: E0202 11:17:50.585452 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d3847cc-dec5-4288-9404-f55d46551d3b" containerName="ceilometer-central-agent" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.585463 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d3847cc-dec5-4288-9404-f55d46551d3b" containerName="ceilometer-central-agent" Feb 02 11:17:50 crc kubenswrapper[4838]: E0202 11:17:50.585479 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d3847cc-dec5-4288-9404-f55d46551d3b" containerName="ceilometer-notification-agent" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.585488 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d3847cc-dec5-4288-9404-f55d46551d3b" containerName="ceilometer-notification-agent" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.585839 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d3847cc-dec5-4288-9404-f55d46551d3b" containerName="proxy-httpd" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.585869 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d3847cc-dec5-4288-9404-f55d46551d3b" containerName="ceilometer-notification-agent" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.585885 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d3847cc-dec5-4288-9404-f55d46551d3b" containerName="sg-core" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.585908 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d3847cc-dec5-4288-9404-f55d46551d3b" containerName="ceilometer-central-agent" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.588226 4838 util.go:30] "No sandbox for 
Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.588226 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.591050 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.591092 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.591698 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc"
Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.603027 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.682523 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " pod="openstack/ceilometer-0"
Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.682596 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-config-data\") pod \"ceilometer-0\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " pod="openstack/ceilometer-0"
Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.682951 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7j85p\" (UniqueName: \"kubernetes.io/projected/97943a79-c8f8-4259-8cb4-1845307b8628-kube-api-access-7j85p\") pod \"ceilometer-0\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " pod="openstack/ceilometer-0"
Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.683024 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " pod="openstack/ceilometer-0"
Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.683411 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/97943a79-c8f8-4259-8cb4-1845307b8628-log-httpd\") pod \"ceilometer-0\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " pod="openstack/ceilometer-0"
Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.683511 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-scripts\") pod \"ceilometer-0\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " pod="openstack/ceilometer-0"
Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.683547 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " pod="openstack/ceilometer-0"
Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.683743 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/97943a79-c8f8-4259-8cb4-1845307b8628-run-httpd\") pod \"ceilometer-0\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " pod="openstack/ceilometer-0" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.786009 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " pod="openstack/ceilometer-0" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.786071 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-config-data\") pod \"ceilometer-0\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " pod="openstack/ceilometer-0" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.786117 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7j85p\" (UniqueName: \"kubernetes.io/projected/97943a79-c8f8-4259-8cb4-1845307b8628-kube-api-access-7j85p\") pod \"ceilometer-0\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " pod="openstack/ceilometer-0" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.786154 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " pod="openstack/ceilometer-0" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.786287 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/97943a79-c8f8-4259-8cb4-1845307b8628-log-httpd\") pod \"ceilometer-0\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " pod="openstack/ceilometer-0" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.786332 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-scripts\") pod \"ceilometer-0\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " pod="openstack/ceilometer-0" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.786386 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " pod="openstack/ceilometer-0" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.786893 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/97943a79-c8f8-4259-8cb4-1845307b8628-log-httpd\") pod \"ceilometer-0\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " pod="openstack/ceilometer-0" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.786593 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/97943a79-c8f8-4259-8cb4-1845307b8628-run-httpd\") pod \"ceilometer-0\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " pod="openstack/ceilometer-0" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.787095 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/97943a79-c8f8-4259-8cb4-1845307b8628-run-httpd\") pod \"ceilometer-0\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " pod="openstack/ceilometer-0" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.791755 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " pod="openstack/ceilometer-0" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.800697 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " pod="openstack/ceilometer-0" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.800960 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-scripts\") pod \"ceilometer-0\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " pod="openstack/ceilometer-0" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.801234 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " pod="openstack/ceilometer-0" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.801311 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-config-data\") pod \"ceilometer-0\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " pod="openstack/ceilometer-0" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.809304 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7j85p\" (UniqueName: \"kubernetes.io/projected/97943a79-c8f8-4259-8cb4-1845307b8628-kube-api-access-7j85p\") pod \"ceilometer-0\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " pod="openstack/ceilometer-0" Feb 02 11:17:50 crc kubenswrapper[4838]: I0202 11:17:50.908202 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0"
Feb 02 11:17:51 crc kubenswrapper[4838]: I0202 11:17:51.415884 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Feb 02 11:17:51 crc kubenswrapper[4838]: I0202 11:17:51.814242 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0"
Feb 02 11:17:52 crc kubenswrapper[4838]: I0202 11:17:52.244448 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97943a79-c8f8-4259-8cb4-1845307b8628","Type":"ContainerStarted","Data":"cb0a57b9db10be48e1c47346667e71e318241cbfe536a72cc1d3de8a1eaf6d9e"}
Feb 02 11:17:52 crc kubenswrapper[4838]: I0202 11:17:52.517148 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d3847cc-dec5-4288-9404-f55d46551d3b" path="/var/lib/kubelet/pods/4d3847cc-dec5-4288-9404-f55d46551d3b/volumes"
Feb 02 11:17:53 crc kubenswrapper[4838]: I0202 11:17:53.256971 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97943a79-c8f8-4259-8cb4-1845307b8628","Type":"ContainerStarted","Data":"184d0cf646d577a21ee80dc9fec3b85f727789002da9c46224376e606800c9e6"}
Feb 02 11:17:54 crc kubenswrapper[4838]: I0202 11:17:54.270578 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97943a79-c8f8-4259-8cb4-1845307b8628","Type":"ContainerStarted","Data":"f5f8664f861428624e0b00c05b07cf395430f2da744bea26e0edf808c7c6a84f"}
Feb 02 11:17:54 crc kubenswrapper[4838]: I0202 11:17:54.271205 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97943a79-c8f8-4259-8cb4-1845307b8628","Type":"ContainerStarted","Data":"72469410a7fe5f56cc8a6650fd2f5a47a253943813bd987d810bded7994e4cf8"}
Feb 02 11:17:57 crc kubenswrapper[4838]: I0202 11:17:57.308450 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97943a79-c8f8-4259-8cb4-1845307b8628","Type":"ContainerStarted","Data":"50971dd8e314e1167067a5fa70fe3a935b8b0c2d8007c0a95cb016a6668ea544"}
Feb 02 11:17:57 crc kubenswrapper[4838]: I0202 11:17:57.309218 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Feb 02 11:17:57 crc kubenswrapper[4838]: I0202 11:17:57.343110 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.781688149 podStartE2EDuration="7.342785537s" podCreationTimestamp="2026-02-02 11:17:50 +0000 UTC" firstStartedPulling="2026-02-02 11:17:51.435924767 +0000 UTC m=+1465.773025785" lastFinishedPulling="2026-02-02 11:17:56.997022105 +0000 UTC m=+1471.334123173" observedRunningTime="2026-02-02 11:17:57.33875223 +0000 UTC m=+1471.675853268" watchObservedRunningTime="2026-02-02 11:17:57.342785537 +0000 UTC m=+1471.679886575"
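[annotation] The "Observed pod startup duration" entry decomposes cleanly: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp (11:17:57.342785537 - 11:17:50 = 7.342785537s), and podStartSLOduration is that interval with the image-pull window (firstStartedPulling to lastFinishedPulling, about 5.5611s) subtracted, giving roughly 1.7817s; the last digits differ slightly from the logged value because the tracker subtracts the monotonic (m=+...) readings rather than the wall-clock strings. A small Go check of that arithmetic using the timestamps from the entry:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Wall-clock timestamps copied from the log entry above; Go's parser
	// accepts the fractional seconds even though the layout omits them.
	const layout = "2006-01-02 15:04:05 -0700 MST"
	parse := func(s string) time.Time {
		t, err := time.Parse(layout, s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2026-02-02 11:17:50 +0000 UTC")
	firstPull := parse("2026-02-02 11:17:51.435924767 +0000 UTC")
	lastPull := parse("2026-02-02 11:17:56.997022105 +0000 UTC")
	running := parse("2026-02-02 11:17:57.342785537 +0000 UTC")

	e2e := running.Sub(created)          // 7.342785537s, the E2E duration
	slo := e2e - lastPull.Sub(firstPull) // ~1.7817s once the pull window is removed
	fmt.Println(e2e, slo)
}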
Feb 02 11:17:58 crc kubenswrapper[4838]: I0202 11:17:58.320830 4838 generic.go:334] "Generic (PLEG): container finished" podID="04bf896a-e964-48a2-900e-44362394a6ac" containerID="12e95395a0c60b1f95b2e3d8453a9e78847600a049aa654dddca355aeb90889b" exitCode=0
Feb 02 11:17:58 crc kubenswrapper[4838]: I0202 11:17:58.322092 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-db-sync-z8sb2" event={"ID":"04bf896a-e964-48a2-900e-44362394a6ac","Type":"ContainerDied","Data":"12e95395a0c60b1f95b2e3d8453a9e78847600a049aa654dddca355aeb90889b"}
Feb 02 11:17:59 crc kubenswrapper[4838]: I0202 11:17:59.780853 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-db-sync-z8sb2"
Feb 02 11:17:59 crc kubenswrapper[4838]: I0202 11:17:59.919414 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlw4f\" (UniqueName: \"kubernetes.io/projected/04bf896a-e964-48a2-900e-44362394a6ac-kube-api-access-rlw4f\") pod \"04bf896a-e964-48a2-900e-44362394a6ac\" (UID: \"04bf896a-e964-48a2-900e-44362394a6ac\") "
Feb 02 11:17:59 crc kubenswrapper[4838]: I0202 11:17:59.919519 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04bf896a-e964-48a2-900e-44362394a6ac-combined-ca-bundle\") pod \"04bf896a-e964-48a2-900e-44362394a6ac\" (UID: \"04bf896a-e964-48a2-900e-44362394a6ac\") "
Feb 02 11:17:59 crc kubenswrapper[4838]: I0202 11:17:59.919549 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04bf896a-e964-48a2-900e-44362394a6ac-scripts\") pod \"04bf896a-e964-48a2-900e-44362394a6ac\" (UID: \"04bf896a-e964-48a2-900e-44362394a6ac\") "
Feb 02 11:17:59 crc kubenswrapper[4838]: I0202 11:17:59.919696 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/04bf896a-e964-48a2-900e-44362394a6ac-etc-podinfo\") pod \"04bf896a-e964-48a2-900e-44362394a6ac\" (UID: \"04bf896a-e964-48a2-900e-44362394a6ac\") "
Feb 02 11:17:59 crc kubenswrapper[4838]: I0202 11:17:59.919722 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04bf896a-e964-48a2-900e-44362394a6ac-config-data\") pod \"04bf896a-e964-48a2-900e-44362394a6ac\" (UID: \"04bf896a-e964-48a2-900e-44362394a6ac\") "
Feb 02 11:17:59 crc kubenswrapper[4838]: I0202 11:17:59.919799 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/04bf896a-e964-48a2-900e-44362394a6ac-config-data-merged\") pod \"04bf896a-e964-48a2-900e-44362394a6ac\" (UID: \"04bf896a-e964-48a2-900e-44362394a6ac\") "
Feb 02 11:17:59 crc kubenswrapper[4838]: I0202 11:17:59.920333 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04bf896a-e964-48a2-900e-44362394a6ac-config-data-merged" (OuterVolumeSpecName: "config-data-merged") pod "04bf896a-e964-48a2-900e-44362394a6ac" (UID: "04bf896a-e964-48a2-900e-44362394a6ac"). InnerVolumeSpecName "config-data-merged". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:17:59 crc kubenswrapper[4838]: I0202 11:17:59.924784 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/04bf896a-e964-48a2-900e-44362394a6ac-etc-podinfo" (OuterVolumeSpecName: "etc-podinfo") pod "04bf896a-e964-48a2-900e-44362394a6ac" (UID: "04bf896a-e964-48a2-900e-44362394a6ac"). InnerVolumeSpecName "etc-podinfo". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Feb 02 11:17:59 crc kubenswrapper[4838]: I0202 11:17:59.925091 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04bf896a-e964-48a2-900e-44362394a6ac-kube-api-access-rlw4f" (OuterVolumeSpecName: "kube-api-access-rlw4f") pod "04bf896a-e964-48a2-900e-44362394a6ac" (UID: "04bf896a-e964-48a2-900e-44362394a6ac"). InnerVolumeSpecName "kube-api-access-rlw4f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:17:59 crc kubenswrapper[4838]: I0202 11:17:59.927835 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04bf896a-e964-48a2-900e-44362394a6ac-scripts" (OuterVolumeSpecName: "scripts") pod "04bf896a-e964-48a2-900e-44362394a6ac" (UID: "04bf896a-e964-48a2-900e-44362394a6ac"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:17:59 crc kubenswrapper[4838]: I0202 11:17:59.947270 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04bf896a-e964-48a2-900e-44362394a6ac-config-data" (OuterVolumeSpecName: "config-data") pod "04bf896a-e964-48a2-900e-44362394a6ac" (UID: "04bf896a-e964-48a2-900e-44362394a6ac"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:17:59 crc kubenswrapper[4838]: I0202 11:17:59.981587 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04bf896a-e964-48a2-900e-44362394a6ac-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "04bf896a-e964-48a2-900e-44362394a6ac" (UID: "04bf896a-e964-48a2-900e-44362394a6ac"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.022193 4838 reconciler_common.go:293] "Volume detached for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/04bf896a-e964-48a2-900e-44362394a6ac-etc-podinfo\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.022220 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04bf896a-e964-48a2-900e-44362394a6ac-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.022229 4838 reconciler_common.go:293] "Volume detached for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/04bf896a-e964-48a2-900e-44362394a6ac-config-data-merged\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.022238 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rlw4f\" (UniqueName: \"kubernetes.io/projected/04bf896a-e964-48a2-900e-44362394a6ac-kube-api-access-rlw4f\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.022251 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04bf896a-e964-48a2-900e-44362394a6ac-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.022259 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04bf896a-e964-48a2-900e-44362394a6ac-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.341043 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-db-sync-z8sb2" event={"ID":"04bf896a-e964-48a2-900e-44362394a6ac","Type":"ContainerDied","Data":"07ea8aa7a2ff3809bd2b770f4f63381ae8e62d662c05b272eb91ea44daf9507e"} Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.341090 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="07ea8aa7a2ff3809bd2b770f4f63381ae8e62d662c05b272eb91ea44daf9507e" Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.341154 4838 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-db-sync-z8sb2" Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.810099 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ironic-neutron-agent-9c5f849b9-h2frc"] Feb 02 11:18:00 crc kubenswrapper[4838]: E0202 11:18:00.810527 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04bf896a-e964-48a2-900e-44362394a6ac" containerName="ironic-db-sync" Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.810547 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="04bf896a-e964-48a2-900e-44362394a6ac" containerName="ironic-db-sync" Feb 02 11:18:00 crc kubenswrapper[4838]: E0202 11:18:00.810556 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04bf896a-e964-48a2-900e-44362394a6ac" containerName="init" Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.810563 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="04bf896a-e964-48a2-900e-44362394a6ac" containerName="init" Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.810840 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="04bf896a-e964-48a2-900e-44362394a6ac" containerName="ironic-db-sync" Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.811763 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-neutron-agent-9c5f849b9-h2frc" Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.814179 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-ironic-neutron-agent-config-data" Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.814381 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-ironic-dockercfg-smhrz" Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.837318 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb80ba2d-57e2-4a6d-95cc-e67af228cb54-combined-ca-bundle\") pod \"ironic-neutron-agent-9c5f849b9-h2frc\" (UID: \"cb80ba2d-57e2-4a6d-95cc-e67af228cb54\") " pod="openstack/ironic-neutron-agent-9c5f849b9-h2frc" Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.837434 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98tt5\" (UniqueName: \"kubernetes.io/projected/cb80ba2d-57e2-4a6d-95cc-e67af228cb54-kube-api-access-98tt5\") pod \"ironic-neutron-agent-9c5f849b9-h2frc\" (UID: \"cb80ba2d-57e2-4a6d-95cc-e67af228cb54\") " pod="openstack/ironic-neutron-agent-9c5f849b9-h2frc" Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.837492 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/cb80ba2d-57e2-4a6d-95cc-e67af228cb54-config\") pod \"ironic-neutron-agent-9c5f849b9-h2frc\" (UID: \"cb80ba2d-57e2-4a6d-95cc-e67af228cb54\") " pod="openstack/ironic-neutron-agent-9c5f849b9-h2frc" Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.846247 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-neutron-agent-9c5f849b9-h2frc"] Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.855977 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ironic-inspector-db-create-rrckz"] Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.857207 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-inspector-db-create-rrckz" Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.885350 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-inspector-db-create-rrckz"] Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.938956 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sk6tx\" (UniqueName: \"kubernetes.io/projected/c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f-kube-api-access-sk6tx\") pod \"ironic-inspector-db-create-rrckz\" (UID: \"c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f\") " pod="openstack/ironic-inspector-db-create-rrckz" Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.939009 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f-operator-scripts\") pod \"ironic-inspector-db-create-rrckz\" (UID: \"c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f\") " pod="openstack/ironic-inspector-db-create-rrckz" Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.939037 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb80ba2d-57e2-4a6d-95cc-e67af228cb54-combined-ca-bundle\") pod \"ironic-neutron-agent-9c5f849b9-h2frc\" (UID: \"cb80ba2d-57e2-4a6d-95cc-e67af228cb54\") " pod="openstack/ironic-neutron-agent-9c5f849b9-h2frc" Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.939099 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98tt5\" (UniqueName: \"kubernetes.io/projected/cb80ba2d-57e2-4a6d-95cc-e67af228cb54-kube-api-access-98tt5\") pod \"ironic-neutron-agent-9c5f849b9-h2frc\" (UID: \"cb80ba2d-57e2-4a6d-95cc-e67af228cb54\") " pod="openstack/ironic-neutron-agent-9c5f849b9-h2frc" Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.939142 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/cb80ba2d-57e2-4a6d-95cc-e67af228cb54-config\") pod \"ironic-neutron-agent-9c5f849b9-h2frc\" (UID: \"cb80ba2d-57e2-4a6d-95cc-e67af228cb54\") " pod="openstack/ironic-neutron-agent-9c5f849b9-h2frc" Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.944415 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb80ba2d-57e2-4a6d-95cc-e67af228cb54-combined-ca-bundle\") pod \"ironic-neutron-agent-9c5f849b9-h2frc\" (UID: \"cb80ba2d-57e2-4a6d-95cc-e67af228cb54\") " pod="openstack/ironic-neutron-agent-9c5f849b9-h2frc" Feb 02 11:18:00 crc kubenswrapper[4838]: I0202 11:18:00.948684 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/cb80ba2d-57e2-4a6d-95cc-e67af228cb54-config\") pod \"ironic-neutron-agent-9c5f849b9-h2frc\" (UID: \"cb80ba2d-57e2-4a6d-95cc-e67af228cb54\") " pod="openstack/ironic-neutron-agent-9c5f849b9-h2frc" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.004364 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98tt5\" (UniqueName: \"kubernetes.io/projected/cb80ba2d-57e2-4a6d-95cc-e67af228cb54-kube-api-access-98tt5\") pod \"ironic-neutron-agent-9c5f849b9-h2frc\" (UID: \"cb80ba2d-57e2-4a6d-95cc-e67af228cb54\") " pod="openstack/ironic-neutron-agent-9c5f849b9-h2frc" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 
11:18:01.019670 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ironic-inspector-5de1-account-create-update-blrln"] Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.077014 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sk6tx\" (UniqueName: \"kubernetes.io/projected/c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f-kube-api-access-sk6tx\") pod \"ironic-inspector-db-create-rrckz\" (UID: \"c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f\") " pod="openstack/ironic-inspector-db-create-rrckz" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.082743 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f-operator-scripts\") pod \"ironic-inspector-db-create-rrckz\" (UID: \"c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f\") " pod="openstack/ironic-inspector-db-create-rrckz" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.086050 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-inspector-5de1-account-create-update-blrln" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.091487 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-inspector-db-secret" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.097017 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f-operator-scripts\") pod \"ironic-inspector-db-create-rrckz\" (UID: \"c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f\") " pod="openstack/ironic-inspector-db-create-rrckz" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.127673 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-inspector-5de1-account-create-update-blrln"] Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.132560 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-neutron-agent-9c5f849b9-h2frc" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.150242 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sk6tx\" (UniqueName: \"kubernetes.io/projected/c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f-kube-api-access-sk6tx\") pod \"ironic-inspector-db-create-rrckz\" (UID: \"c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f\") " pod="openstack/ironic-inspector-db-create-rrckz" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.187271 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-inspector-db-create-rrckz" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.191557 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zghtg\" (UniqueName: \"kubernetes.io/projected/9dec3cd0-38f7-4d41-92ad-2e0c8d36e136-kube-api-access-zghtg\") pod \"ironic-inspector-5de1-account-create-update-blrln\" (UID: \"9dec3cd0-38f7-4d41-92ad-2e0c8d36e136\") " pod="openstack/ironic-inspector-5de1-account-create-update-blrln" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.191714 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dec3cd0-38f7-4d41-92ad-2e0c8d36e136-operator-scripts\") pod \"ironic-inspector-5de1-account-create-update-blrln\" (UID: \"9dec3cd0-38f7-4d41-92ad-2e0c8d36e136\") " pod="openstack/ironic-inspector-5de1-account-create-update-blrln" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.203388 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ironic-7777764cf4-c96ng"] Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.215918 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.218115 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.218455 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-api-scripts" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.263373 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-7777764cf4-c96ng"] Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.266901 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-api-config-data" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.268017 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-config-data" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.293739 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zghtg\" (UniqueName: \"kubernetes.io/projected/9dec3cd0-38f7-4d41-92ad-2e0c8d36e136-kube-api-access-zghtg\") pod \"ironic-inspector-5de1-account-create-update-blrln\" (UID: \"9dec3cd0-38f7-4d41-92ad-2e0c8d36e136\") " pod="openstack/ironic-inspector-5de1-account-create-update-blrln" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.293779 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dec3cd0-38f7-4d41-92ad-2e0c8d36e136-operator-scripts\") pod \"ironic-inspector-5de1-account-create-update-blrln\" (UID: \"9dec3cd0-38f7-4d41-92ad-2e0c8d36e136\") " pod="openstack/ironic-inspector-5de1-account-create-update-blrln" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.294435 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dec3cd0-38f7-4d41-92ad-2e0c8d36e136-operator-scripts\") pod \"ironic-inspector-5de1-account-create-update-blrln\" (UID: \"9dec3cd0-38f7-4d41-92ad-2e0c8d36e136\") " pod="openstack/ironic-inspector-5de1-account-create-update-blrln" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.320492 4838 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-zghtg\" (UniqueName: \"kubernetes.io/projected/9dec3cd0-38f7-4d41-92ad-2e0c8d36e136-kube-api-access-zghtg\") pod \"ironic-inspector-5de1-account-create-update-blrln\" (UID: \"9dec3cd0-38f7-4d41-92ad-2e0c8d36e136\") " pod="openstack/ironic-inspector-5de1-account-create-update-blrln" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.395345 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/26cd90e1-8974-474a-9c49-387310affd27-config-data-merged\") pod \"ironic-7777764cf4-c96ng\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.395709 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/26cd90e1-8974-474a-9c49-387310affd27-etc-podinfo\") pod \"ironic-7777764cf4-c96ng\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.395785 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26cd90e1-8974-474a-9c49-387310affd27-combined-ca-bundle\") pod \"ironic-7777764cf4-c96ng\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.395863 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/26cd90e1-8974-474a-9c49-387310affd27-config-data-custom\") pod \"ironic-7777764cf4-c96ng\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.395887 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8lq5\" (UniqueName: \"kubernetes.io/projected/26cd90e1-8974-474a-9c49-387310affd27-kube-api-access-q8lq5\") pod \"ironic-7777764cf4-c96ng\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.395915 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/26cd90e1-8974-474a-9c49-387310affd27-logs\") pod \"ironic-7777764cf4-c96ng\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.395975 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26cd90e1-8974-474a-9c49-387310affd27-config-data\") pod \"ironic-7777764cf4-c96ng\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.396021 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26cd90e1-8974-474a-9c49-387310affd27-scripts\") pod \"ironic-7777764cf4-c96ng\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 
11:18:01.490484 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-inspector-5de1-account-create-update-blrln" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.503740 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26cd90e1-8974-474a-9c49-387310affd27-config-data\") pod \"ironic-7777764cf4-c96ng\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.503812 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26cd90e1-8974-474a-9c49-387310affd27-scripts\") pod \"ironic-7777764cf4-c96ng\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.503860 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/26cd90e1-8974-474a-9c49-387310affd27-config-data-merged\") pod \"ironic-7777764cf4-c96ng\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.503886 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/26cd90e1-8974-474a-9c49-387310affd27-etc-podinfo\") pod \"ironic-7777764cf4-c96ng\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.503953 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26cd90e1-8974-474a-9c49-387310affd27-combined-ca-bundle\") pod \"ironic-7777764cf4-c96ng\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.504013 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/26cd90e1-8974-474a-9c49-387310affd27-config-data-custom\") pod \"ironic-7777764cf4-c96ng\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.504036 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8lq5\" (UniqueName: \"kubernetes.io/projected/26cd90e1-8974-474a-9c49-387310affd27-kube-api-access-q8lq5\") pod \"ironic-7777764cf4-c96ng\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.504061 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/26cd90e1-8974-474a-9c49-387310affd27-logs\") pod \"ironic-7777764cf4-c96ng\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.504570 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/26cd90e1-8974-474a-9c49-387310affd27-logs\") pod \"ironic-7777764cf4-c96ng\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:01 crc 
kubenswrapper[4838]: I0202 11:18:01.508648 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/26cd90e1-8974-474a-9c49-387310affd27-config-data-merged\") pod \"ironic-7777764cf4-c96ng\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.514219 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/26cd90e1-8974-474a-9c49-387310affd27-etc-podinfo\") pod \"ironic-7777764cf4-c96ng\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.514804 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26cd90e1-8974-474a-9c49-387310affd27-config-data\") pod \"ironic-7777764cf4-c96ng\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.518280 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26cd90e1-8974-474a-9c49-387310affd27-scripts\") pod \"ironic-7777764cf4-c96ng\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.522299 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26cd90e1-8974-474a-9c49-387310affd27-combined-ca-bundle\") pod \"ironic-7777764cf4-c96ng\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.530110 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/26cd90e1-8974-474a-9c49-387310affd27-config-data-custom\") pod \"ironic-7777764cf4-c96ng\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.531445 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8lq5\" (UniqueName: \"kubernetes.io/projected/26cd90e1-8974-474a-9c49-387310affd27-kube-api-access-q8lq5\") pod \"ironic-7777764cf4-c96ng\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.606893 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.791918 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-inspector-db-create-rrckz"] Feb 02 11:18:01 crc kubenswrapper[4838]: W0202 11:18:01.796788 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc6cfc3ca_16a5_4d4c_90ac_511fec2bdc3f.slice/crio-5f3c5bdf424ac5e2d0d2aea090234015377deebded797da2a8f33b856f2fcce9 WatchSource:0}: Error finding container 5f3c5bdf424ac5e2d0d2aea090234015377deebded797da2a8f33b856f2fcce9: Status 404 returned error can't find the container with id 5f3c5bdf424ac5e2d0d2aea090234015377deebded797da2a8f33b856f2fcce9 Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.906929 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ironic-conductor-0"] Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.914518 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-conductor-0" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.916781 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-conductor-config-data" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.922871 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-conductor-scripts" Feb 02 11:18:01 crc kubenswrapper[4838]: I0202 11:18:01.942418 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-neutron-agent-9c5f849b9-h2frc"] Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:01.958807 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-conductor-0"] Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.012354 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/3318b8c1-22ca-45c4-a2fd-90205cea5a72-config-data-merged\") pod \"ironic-conductor-0\" (UID: \"3318b8c1-22ca-45c4-a2fd-90205cea5a72\") " pod="openstack/ironic-conductor-0" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.012700 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3318b8c1-22ca-45c4-a2fd-90205cea5a72-config-data\") pod \"ironic-conductor-0\" (UID: \"3318b8c1-22ca-45c4-a2fd-90205cea5a72\") " pod="openstack/ironic-conductor-0" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.012957 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3318b8c1-22ca-45c4-a2fd-90205cea5a72-scripts\") pod \"ironic-conductor-0\" (UID: \"3318b8c1-22ca-45c4-a2fd-90205cea5a72\") " pod="openstack/ironic-conductor-0" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.013013 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ironic-conductor-0\" (UID: \"3318b8c1-22ca-45c4-a2fd-90205cea5a72\") " pod="openstack/ironic-conductor-0" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.013057 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/3318b8c1-22ca-45c4-a2fd-90205cea5a72-combined-ca-bundle\") pod \"ironic-conductor-0\" (UID: \"3318b8c1-22ca-45c4-a2fd-90205cea5a72\") " pod="openstack/ironic-conductor-0" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.013113 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3318b8c1-22ca-45c4-a2fd-90205cea5a72-config-data-custom\") pod \"ironic-conductor-0\" (UID: \"3318b8c1-22ca-45c4-a2fd-90205cea5a72\") " pod="openstack/ironic-conductor-0" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.013136 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkk8v\" (UniqueName: \"kubernetes.io/projected/3318b8c1-22ca-45c4-a2fd-90205cea5a72-kube-api-access-wkk8v\") pod \"ironic-conductor-0\" (UID: \"3318b8c1-22ca-45c4-a2fd-90205cea5a72\") " pod="openstack/ironic-conductor-0" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.013242 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/3318b8c1-22ca-45c4-a2fd-90205cea5a72-etc-podinfo\") pod \"ironic-conductor-0\" (UID: \"3318b8c1-22ca-45c4-a2fd-90205cea5a72\") " pod="openstack/ironic-conductor-0" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.098322 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-inspector-5de1-account-create-update-blrln"] Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.115033 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3318b8c1-22ca-45c4-a2fd-90205cea5a72-config-data\") pod \"ironic-conductor-0\" (UID: \"3318b8c1-22ca-45c4-a2fd-90205cea5a72\") " pod="openstack/ironic-conductor-0" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.115137 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3318b8c1-22ca-45c4-a2fd-90205cea5a72-scripts\") pod \"ironic-conductor-0\" (UID: \"3318b8c1-22ca-45c4-a2fd-90205cea5a72\") " pod="openstack/ironic-conductor-0" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.115160 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ironic-conductor-0\" (UID: \"3318b8c1-22ca-45c4-a2fd-90205cea5a72\") " pod="openstack/ironic-conductor-0" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.115181 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3318b8c1-22ca-45c4-a2fd-90205cea5a72-combined-ca-bundle\") pod \"ironic-conductor-0\" (UID: \"3318b8c1-22ca-45c4-a2fd-90205cea5a72\") " pod="openstack/ironic-conductor-0" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.115204 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3318b8c1-22ca-45c4-a2fd-90205cea5a72-config-data-custom\") pod \"ironic-conductor-0\" (UID: \"3318b8c1-22ca-45c4-a2fd-90205cea5a72\") " pod="openstack/ironic-conductor-0" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.115221 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkk8v\" (UniqueName: 
\"kubernetes.io/projected/3318b8c1-22ca-45c4-a2fd-90205cea5a72-kube-api-access-wkk8v\") pod \"ironic-conductor-0\" (UID: \"3318b8c1-22ca-45c4-a2fd-90205cea5a72\") " pod="openstack/ironic-conductor-0" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.115250 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/3318b8c1-22ca-45c4-a2fd-90205cea5a72-etc-podinfo\") pod \"ironic-conductor-0\" (UID: \"3318b8c1-22ca-45c4-a2fd-90205cea5a72\") " pod="openstack/ironic-conductor-0" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.115284 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/3318b8c1-22ca-45c4-a2fd-90205cea5a72-config-data-merged\") pod \"ironic-conductor-0\" (UID: \"3318b8c1-22ca-45c4-a2fd-90205cea5a72\") " pod="openstack/ironic-conductor-0" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.122187 4838 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ironic-conductor-0\" (UID: \"3318b8c1-22ca-45c4-a2fd-90205cea5a72\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/ironic-conductor-0" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.142142 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/3318b8c1-22ca-45c4-a2fd-90205cea5a72-config-data-merged\") pod \"ironic-conductor-0\" (UID: \"3318b8c1-22ca-45c4-a2fd-90205cea5a72\") " pod="openstack/ironic-conductor-0" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.158112 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3318b8c1-22ca-45c4-a2fd-90205cea5a72-config-data\") pod \"ironic-conductor-0\" (UID: \"3318b8c1-22ca-45c4-a2fd-90205cea5a72\") " pod="openstack/ironic-conductor-0" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.160539 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3318b8c1-22ca-45c4-a2fd-90205cea5a72-config-data-custom\") pod \"ironic-conductor-0\" (UID: \"3318b8c1-22ca-45c4-a2fd-90205cea5a72\") " pod="openstack/ironic-conductor-0" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.160559 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3318b8c1-22ca-45c4-a2fd-90205cea5a72-scripts\") pod \"ironic-conductor-0\" (UID: \"3318b8c1-22ca-45c4-a2fd-90205cea5a72\") " pod="openstack/ironic-conductor-0" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.164058 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkk8v\" (UniqueName: \"kubernetes.io/projected/3318b8c1-22ca-45c4-a2fd-90205cea5a72-kube-api-access-wkk8v\") pod \"ironic-conductor-0\" (UID: \"3318b8c1-22ca-45c4-a2fd-90205cea5a72\") " pod="openstack/ironic-conductor-0" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.172325 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/3318b8c1-22ca-45c4-a2fd-90205cea5a72-etc-podinfo\") pod \"ironic-conductor-0\" (UID: \"3318b8c1-22ca-45c4-a2fd-90205cea5a72\") " pod="openstack/ironic-conductor-0" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.188511 
4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3318b8c1-22ca-45c4-a2fd-90205cea5a72-combined-ca-bundle\") pod \"ironic-conductor-0\" (UID: \"3318b8c1-22ca-45c4-a2fd-90205cea5a72\") " pod="openstack/ironic-conductor-0" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.204812 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ironic-conductor-0\" (UID: \"3318b8c1-22ca-45c4-a2fd-90205cea5a72\") " pod="openstack/ironic-conductor-0" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.289686 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-7777764cf4-c96ng"] Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.369381 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-conductor-0" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.370481 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-neutron-agent-9c5f849b9-h2frc" event={"ID":"cb80ba2d-57e2-4a6d-95cc-e67af228cb54","Type":"ContainerStarted","Data":"d4900135053f7927d24275a748dc88dc7532cd9dfbb99c05f6564a88f5fbce65"} Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.371346 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-5de1-account-create-update-blrln" event={"ID":"9dec3cd0-38f7-4d41-92ad-2e0c8d36e136","Type":"ContainerStarted","Data":"06c2eceace3bf2cc6d70299440d7dfe1d1248f250bc67f5746b65e6fb82212b3"} Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.372100 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-7777764cf4-c96ng" event={"ID":"26cd90e1-8974-474a-9c49-387310affd27","Type":"ContainerStarted","Data":"cf91e83c4e96845855a31e13ff3163a3227859f454b55db6b6328f0aac089591"} Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.373210 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-db-create-rrckz" event={"ID":"c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f","Type":"ContainerStarted","Data":"da64accedd8b51facf235c937846e436195424ab0dceb46c6920cb511f620b45"} Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.373235 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-db-create-rrckz" event={"ID":"c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f","Type":"ContainerStarted","Data":"5f3c5bdf424ac5e2d0d2aea090234015377deebded797da2a8f33b856f2fcce9"} Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.391985 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ironic-inspector-db-create-rrckz" podStartSLOduration=2.391965641 podStartE2EDuration="2.391965641s" podCreationTimestamp="2026-02-02 11:18:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:18:02.391130389 +0000 UTC m=+1476.728231407" watchObservedRunningTime="2026-02-02 11:18:02.391965641 +0000 UTC m=+1476.729066669" Feb 02 11:18:02 crc kubenswrapper[4838]: I0202 11:18:02.839374 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-conductor-0"] Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.397670 4838 generic.go:334] "Generic (PLEG): container finished" podID="9dec3cd0-38f7-4d41-92ad-2e0c8d36e136" containerID="28858d042b50683d5322341f67456255cb695f314cfbe492c35999dd9c4bf282" 
exitCode=0 Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.397906 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-5de1-account-create-update-blrln" event={"ID":"9dec3cd0-38f7-4d41-92ad-2e0c8d36e136","Type":"ContainerDied","Data":"28858d042b50683d5322341f67456255cb695f314cfbe492c35999dd9c4bf282"} Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.403840 4838 generic.go:334] "Generic (PLEG): container finished" podID="c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f" containerID="da64accedd8b51facf235c937846e436195424ab0dceb46c6920cb511f620b45" exitCode=0 Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.403899 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-db-create-rrckz" event={"ID":"c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f","Type":"ContainerDied","Data":"da64accedd8b51facf235c937846e436195424ab0dceb46c6920cb511f620b45"} Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.408278 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-conductor-0" event={"ID":"3318b8c1-22ca-45c4-a2fd-90205cea5a72","Type":"ContainerStarted","Data":"e1c67a33e7cbaffea91940f5a2d3d236a43fe296057c6a26093858d0e987e5f3"} Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.539601 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ironic-68776656b4-wsgxk"] Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.550638 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.553251 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ironic-public-svc" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.553563 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ironic-internal-svc" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.560261 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-68776656b4-wsgxk"] Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.673188 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6281d3a-f605-47fd-a334-f5d814a86d4f-combined-ca-bundle\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.673244 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e6281d3a-f605-47fd-a334-f5d814a86d4f-config-data-custom\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.673265 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6281d3a-f605-47fd-a334-f5d814a86d4f-config-data\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.673283 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6281d3a-f605-47fd-a334-f5d814a86d4f-scripts\") pod \"ironic-68776656b4-wsgxk\" (UID: 
\"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.673302 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6281d3a-f605-47fd-a334-f5d814a86d4f-logs\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.673322 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/e6281d3a-f605-47fd-a334-f5d814a86d4f-config-data-merged\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.673340 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6281d3a-f605-47fd-a334-f5d814a86d4f-internal-tls-certs\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.673365 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6281d3a-f605-47fd-a334-f5d814a86d4f-public-tls-certs\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.679514 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/e6281d3a-f605-47fd-a334-f5d814a86d4f-etc-podinfo\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.679672 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ht6jw\" (UniqueName: \"kubernetes.io/projected/e6281d3a-f605-47fd-a334-f5d814a86d4f-kube-api-access-ht6jw\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.781810 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6281d3a-f605-47fd-a334-f5d814a86d4f-combined-ca-bundle\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.781864 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e6281d3a-f605-47fd-a334-f5d814a86d4f-config-data-custom\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.781883 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6281d3a-f605-47fd-a334-f5d814a86d4f-config-data\") pod \"ironic-68776656b4-wsgxk\" (UID: 
\"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.781908 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6281d3a-f605-47fd-a334-f5d814a86d4f-scripts\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.781933 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6281d3a-f605-47fd-a334-f5d814a86d4f-logs\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.781953 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/e6281d3a-f605-47fd-a334-f5d814a86d4f-config-data-merged\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.781972 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6281d3a-f605-47fd-a334-f5d814a86d4f-internal-tls-certs\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.782001 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6281d3a-f605-47fd-a334-f5d814a86d4f-public-tls-certs\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.782050 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/e6281d3a-f605-47fd-a334-f5d814a86d4f-etc-podinfo\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.782065 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ht6jw\" (UniqueName: \"kubernetes.io/projected/e6281d3a-f605-47fd-a334-f5d814a86d4f-kube-api-access-ht6jw\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.782681 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6281d3a-f605-47fd-a334-f5d814a86d4f-logs\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.783074 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/e6281d3a-f605-47fd-a334-f5d814a86d4f-config-data-merged\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.787649 4838 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e6281d3a-f605-47fd-a334-f5d814a86d4f-config-data-custom\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.787805 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6281d3a-f605-47fd-a334-f5d814a86d4f-combined-ca-bundle\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.788502 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6281d3a-f605-47fd-a334-f5d814a86d4f-internal-tls-certs\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.789084 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6281d3a-f605-47fd-a334-f5d814a86d4f-scripts\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.792198 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6281d3a-f605-47fd-a334-f5d814a86d4f-public-tls-certs\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.802125 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/e6281d3a-f605-47fd-a334-f5d814a86d4f-etc-podinfo\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.802348 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6281d3a-f605-47fd-a334-f5d814a86d4f-config-data\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.808046 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ht6jw\" (UniqueName: \"kubernetes.io/projected/e6281d3a-f605-47fd-a334-f5d814a86d4f-kube-api-access-ht6jw\") pod \"ironic-68776656b4-wsgxk\" (UID: \"e6281d3a-f605-47fd-a334-f5d814a86d4f\") " pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:03 crc kubenswrapper[4838]: I0202 11:18:03.881391 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:04 crc kubenswrapper[4838]: I0202 11:18:04.419056 4838 generic.go:334] "Generic (PLEG): container finished" podID="3318b8c1-22ca-45c4-a2fd-90205cea5a72" containerID="477309b182aafe38cb2004df90c8d23cdced6d94a6a0bf1d895cc3342ad166a2" exitCode=0 Feb 02 11:18:04 crc kubenswrapper[4838]: I0202 11:18:04.419151 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-conductor-0" event={"ID":"3318b8c1-22ca-45c4-a2fd-90205cea5a72","Type":"ContainerDied","Data":"477309b182aafe38cb2004df90c8d23cdced6d94a6a0bf1d895cc3342ad166a2"} Feb 02 11:18:04 crc kubenswrapper[4838]: I0202 11:18:04.858383 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-inspector-5de1-account-create-update-blrln" Feb 02 11:18:04 crc kubenswrapper[4838]: I0202 11:18:04.969170 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-inspector-db-create-rrckz" Feb 02 11:18:05 crc kubenswrapper[4838]: I0202 11:18:05.015201 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zghtg\" (UniqueName: \"kubernetes.io/projected/9dec3cd0-38f7-4d41-92ad-2e0c8d36e136-kube-api-access-zghtg\") pod \"9dec3cd0-38f7-4d41-92ad-2e0c8d36e136\" (UID: \"9dec3cd0-38f7-4d41-92ad-2e0c8d36e136\") " Feb 02 11:18:05 crc kubenswrapper[4838]: I0202 11:18:05.015456 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dec3cd0-38f7-4d41-92ad-2e0c8d36e136-operator-scripts\") pod \"9dec3cd0-38f7-4d41-92ad-2e0c8d36e136\" (UID: \"9dec3cd0-38f7-4d41-92ad-2e0c8d36e136\") " Feb 02 11:18:05 crc kubenswrapper[4838]: I0202 11:18:05.017180 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9dec3cd0-38f7-4d41-92ad-2e0c8d36e136-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9dec3cd0-38f7-4d41-92ad-2e0c8d36e136" (UID: "9dec3cd0-38f7-4d41-92ad-2e0c8d36e136"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:18:05 crc kubenswrapper[4838]: I0202 11:18:05.025026 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dec3cd0-38f7-4d41-92ad-2e0c8d36e136-kube-api-access-zghtg" (OuterVolumeSpecName: "kube-api-access-zghtg") pod "9dec3cd0-38f7-4d41-92ad-2e0c8d36e136" (UID: "9dec3cd0-38f7-4d41-92ad-2e0c8d36e136"). InnerVolumeSpecName "kube-api-access-zghtg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:18:05 crc kubenswrapper[4838]: I0202 11:18:05.117518 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sk6tx\" (UniqueName: \"kubernetes.io/projected/c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f-kube-api-access-sk6tx\") pod \"c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f\" (UID: \"c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f\") " Feb 02 11:18:05 crc kubenswrapper[4838]: I0202 11:18:05.117917 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f-operator-scripts\") pod \"c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f\" (UID: \"c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f\") " Feb 02 11:18:05 crc kubenswrapper[4838]: I0202 11:18:05.118428 4838 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dec3cd0-38f7-4d41-92ad-2e0c8d36e136-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:05 crc kubenswrapper[4838]: I0202 11:18:05.118446 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zghtg\" (UniqueName: \"kubernetes.io/projected/9dec3cd0-38f7-4d41-92ad-2e0c8d36e136-kube-api-access-zghtg\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:05 crc kubenswrapper[4838]: I0202 11:18:05.118571 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f" (UID: "c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:18:05 crc kubenswrapper[4838]: I0202 11:18:05.121156 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f-kube-api-access-sk6tx" (OuterVolumeSpecName: "kube-api-access-sk6tx") pod "c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f" (UID: "c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f"). InnerVolumeSpecName "kube-api-access-sk6tx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:18:05 crc kubenswrapper[4838]: W0202 11:18:05.124697 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode6281d3a_f605_47fd_a334_f5d814a86d4f.slice/crio-4ce298b9f6d59b1271401a80ef9e37b688213d13b37e4f4c3d26e0b3c599a7ff WatchSource:0}: Error finding container 4ce298b9f6d59b1271401a80ef9e37b688213d13b37e4f4c3d26e0b3c599a7ff: Status 404 returned error can't find the container with id 4ce298b9f6d59b1271401a80ef9e37b688213d13b37e4f4c3d26e0b3c599a7ff Feb 02 11:18:05 crc kubenswrapper[4838]: I0202 11:18:05.130555 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-68776656b4-wsgxk"] Feb 02 11:18:05 crc kubenswrapper[4838]: I0202 11:18:05.220421 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sk6tx\" (UniqueName: \"kubernetes.io/projected/c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f-kube-api-access-sk6tx\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:05 crc kubenswrapper[4838]: I0202 11:18:05.220469 4838 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:05 crc kubenswrapper[4838]: I0202 11:18:05.439011 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-68776656b4-wsgxk" event={"ID":"e6281d3a-f605-47fd-a334-f5d814a86d4f","Type":"ContainerStarted","Data":"76afed98f8408020f38ae2b5bf7425db5a81cf1edf3d5e1ceb4eccfc870619eb"} Feb 02 11:18:05 crc kubenswrapper[4838]: I0202 11:18:05.439061 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-68776656b4-wsgxk" event={"ID":"e6281d3a-f605-47fd-a334-f5d814a86d4f","Type":"ContainerStarted","Data":"4ce298b9f6d59b1271401a80ef9e37b688213d13b37e4f4c3d26e0b3c599a7ff"} Feb 02 11:18:05 crc kubenswrapper[4838]: I0202 11:18:05.443989 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-inspector-5de1-account-create-update-blrln" Feb 02 11:18:05 crc kubenswrapper[4838]: I0202 11:18:05.443989 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-5de1-account-create-update-blrln" event={"ID":"9dec3cd0-38f7-4d41-92ad-2e0c8d36e136","Type":"ContainerDied","Data":"06c2eceace3bf2cc6d70299440d7dfe1d1248f250bc67f5746b65e6fb82212b3"} Feb 02 11:18:05 crc kubenswrapper[4838]: I0202 11:18:05.447863 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="06c2eceace3bf2cc6d70299440d7dfe1d1248f250bc67f5746b65e6fb82212b3" Feb 02 11:18:05 crc kubenswrapper[4838]: I0202 11:18:05.449179 4838 generic.go:334] "Generic (PLEG): container finished" podID="26cd90e1-8974-474a-9c49-387310affd27" containerID="a7c41d1aa482274ff51cf90b0de7515a71f4f1aa3666daf6013353b7b92cdd3f" exitCode=0 Feb 02 11:18:05 crc kubenswrapper[4838]: I0202 11:18:05.449267 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-7777764cf4-c96ng" event={"ID":"26cd90e1-8974-474a-9c49-387310affd27","Type":"ContainerDied","Data":"a7c41d1aa482274ff51cf90b0de7515a71f4f1aa3666daf6013353b7b92cdd3f"} Feb 02 11:18:05 crc kubenswrapper[4838]: I0202 11:18:05.451726 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-db-create-rrckz" event={"ID":"c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f","Type":"ContainerDied","Data":"5f3c5bdf424ac5e2d0d2aea090234015377deebded797da2a8f33b856f2fcce9"} Feb 02 11:18:05 crc kubenswrapper[4838]: I0202 11:18:05.451753 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f3c5bdf424ac5e2d0d2aea090234015377deebded797da2a8f33b856f2fcce9" Feb 02 11:18:05 crc kubenswrapper[4838]: I0202 11:18:05.451781 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-inspector-db-create-rrckz" Feb 02 11:18:05 crc kubenswrapper[4838]: I0202 11:18:05.456497 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-neutron-agent-9c5f849b9-h2frc" event={"ID":"cb80ba2d-57e2-4a6d-95cc-e67af228cb54","Type":"ContainerStarted","Data":"e542eb2314e1b12fb6004684777353e598902c4d9f23f72c878aaaf816628be8"} Feb 02 11:18:05 crc kubenswrapper[4838]: I0202 11:18:05.457133 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ironic-neutron-agent-9c5f849b9-h2frc" Feb 02 11:18:05 crc kubenswrapper[4838]: I0202 11:18:05.540337 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ironic-neutron-agent-9c5f849b9-h2frc" podStartSLOduration=2.945499056 podStartE2EDuration="5.54031528s" podCreationTimestamp="2026-02-02 11:18:00 +0000 UTC" firstStartedPulling="2026-02-02 11:18:01.922204121 +0000 UTC m=+1476.259305159" lastFinishedPulling="2026-02-02 11:18:04.517020345 +0000 UTC m=+1478.854121383" observedRunningTime="2026-02-02 11:18:05.510354588 +0000 UTC m=+1479.847455616" watchObservedRunningTime="2026-02-02 11:18:05.54031528 +0000 UTC m=+1479.877416308" Feb 02 11:18:06 crc kubenswrapper[4838]: I0202 11:18:06.468110 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-7777764cf4-c96ng" event={"ID":"26cd90e1-8974-474a-9c49-387310affd27","Type":"ContainerStarted","Data":"07f2acee6f71bc99353a83726dac020f797711c6ef022cfa588d96a1931fd90a"} Feb 02 11:18:06 crc kubenswrapper[4838]: I0202 11:18:06.468726 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-7777764cf4-c96ng" event={"ID":"26cd90e1-8974-474a-9c49-387310affd27","Type":"ContainerStarted","Data":"7dd96be9d2713d8d2cfb85bf530f3571ba2e89f0d27ef3790fd900be399a10c4"} Feb 02 11:18:06 crc kubenswrapper[4838]: I0202 11:18:06.469024 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:06 crc kubenswrapper[4838]: I0202 11:18:06.472170 4838 generic.go:334] "Generic (PLEG): container finished" podID="e6281d3a-f605-47fd-a334-f5d814a86d4f" containerID="76afed98f8408020f38ae2b5bf7425db5a81cf1edf3d5e1ceb4eccfc870619eb" exitCode=0 Feb 02 11:18:06 crc kubenswrapper[4838]: I0202 11:18:06.472315 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-68776656b4-wsgxk" event={"ID":"e6281d3a-f605-47fd-a334-f5d814a86d4f","Type":"ContainerDied","Data":"76afed98f8408020f38ae2b5bf7425db5a81cf1edf3d5e1ceb4eccfc870619eb"} Feb 02 11:18:06 crc kubenswrapper[4838]: I0202 11:18:06.518485 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ironic-7777764cf4-c96ng" podStartSLOduration=3.298414885 podStartE2EDuration="5.51846275s" podCreationTimestamp="2026-02-02 11:18:01 +0000 UTC" firstStartedPulling="2026-02-02 11:18:02.310913698 +0000 UTC m=+1476.648014726" lastFinishedPulling="2026-02-02 11:18:04.530961563 +0000 UTC m=+1478.868062591" observedRunningTime="2026-02-02 11:18:06.495986496 +0000 UTC m=+1480.833087534" watchObservedRunningTime="2026-02-02 11:18:06.51846275 +0000 UTC m=+1480.855563798" Feb 02 11:18:07 crc kubenswrapper[4838]: I0202 11:18:07.486344 4838 generic.go:334] "Generic (PLEG): container finished" podID="26cd90e1-8974-474a-9c49-387310affd27" containerID="07f2acee6f71bc99353a83726dac020f797711c6ef022cfa588d96a1931fd90a" exitCode=1 Feb 02 11:18:07 crc kubenswrapper[4838]: I0202 11:18:07.486419 4838 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/ironic-7777764cf4-c96ng" event={"ID":"26cd90e1-8974-474a-9c49-387310affd27","Type":"ContainerDied","Data":"07f2acee6f71bc99353a83726dac020f797711c6ef022cfa588d96a1931fd90a"} Feb 02 11:18:07 crc kubenswrapper[4838]: I0202 11:18:07.487486 4838 scope.go:117] "RemoveContainer" containerID="07f2acee6f71bc99353a83726dac020f797711c6ef022cfa588d96a1931fd90a" Feb 02 11:18:09 crc kubenswrapper[4838]: I0202 11:18:09.517487 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-7777764cf4-c96ng" event={"ID":"26cd90e1-8974-474a-9c49-387310affd27","Type":"ContainerStarted","Data":"2f31db128fbc1cd7317c380e9346100613922362881f718c9accae28397099a3"} Feb 02 11:18:09 crc kubenswrapper[4838]: I0202 11:18:09.518078 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:09 crc kubenswrapper[4838]: I0202 11:18:09.520994 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-68776656b4-wsgxk" event={"ID":"e6281d3a-f605-47fd-a334-f5d814a86d4f","Type":"ContainerStarted","Data":"f107d650262f8f65823a7389116be969a91dc3e4bb5f245a0991e32eca8b84f2"} Feb 02 11:18:10 crc kubenswrapper[4838]: I0202 11:18:10.533516 4838 generic.go:334] "Generic (PLEG): container finished" podID="26cd90e1-8974-474a-9c49-387310affd27" containerID="2f31db128fbc1cd7317c380e9346100613922362881f718c9accae28397099a3" exitCode=1 Feb 02 11:18:10 crc kubenswrapper[4838]: I0202 11:18:10.533592 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-7777764cf4-c96ng" event={"ID":"26cd90e1-8974-474a-9c49-387310affd27","Type":"ContainerDied","Data":"2f31db128fbc1cd7317c380e9346100613922362881f718c9accae28397099a3"} Feb 02 11:18:10 crc kubenswrapper[4838]: I0202 11:18:10.533644 4838 scope.go:117] "RemoveContainer" containerID="07f2acee6f71bc99353a83726dac020f797711c6ef022cfa588d96a1931fd90a" Feb 02 11:18:10 crc kubenswrapper[4838]: I0202 11:18:10.534216 4838 scope.go:117] "RemoveContainer" containerID="2f31db128fbc1cd7317c380e9346100613922362881f718c9accae28397099a3" Feb 02 11:18:10 crc kubenswrapper[4838]: E0202 11:18:10.534603 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ironic-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ironic-api pod=ironic-7777764cf4-c96ng_openstack(26cd90e1-8974-474a-9c49-387310affd27)\"" pod="openstack/ironic-7777764cf4-c96ng" podUID="26cd90e1-8974-474a-9c49-387310affd27" Feb 02 11:18:10 crc kubenswrapper[4838]: I0202 11:18:10.538831 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-68776656b4-wsgxk" event={"ID":"e6281d3a-f605-47fd-a334-f5d814a86d4f","Type":"ContainerStarted","Data":"840628d73d9913102863a343cb6f3bef66f9d8ae85a2d8a44c2812192e7bd3c4"} Feb 02 11:18:10 crc kubenswrapper[4838]: I0202 11:18:10.539052 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:10 crc kubenswrapper[4838]: I0202 11:18:10.578140 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ironic-68776656b4-wsgxk" podStartSLOduration=7.578123573 podStartE2EDuration="7.578123573s" podCreationTimestamp="2026-02-02 11:18:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:18:10.574202609 +0000 UTC m=+1484.911303637" watchObservedRunningTime="2026-02-02 11:18:10.578123573 +0000 UTC m=+1484.915224601" Feb 
02 11:18:11 crc kubenswrapper[4838]: I0202 11:18:11.157816 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ironic-neutron-agent-9c5f849b9-h2frc" Feb 02 11:18:11 crc kubenswrapper[4838]: I0202 11:18:11.548226 4838 scope.go:117] "RemoveContainer" containerID="2f31db128fbc1cd7317c380e9346100613922362881f718c9accae28397099a3" Feb 02 11:18:11 crc kubenswrapper[4838]: E0202 11:18:11.548418 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ironic-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ironic-api pod=ironic-7777764cf4-c96ng_openstack(26cd90e1-8974-474a-9c49-387310affd27)\"" pod="openstack/ironic-7777764cf4-c96ng" podUID="26cd90e1-8974-474a-9c49-387310affd27" Feb 02 11:18:11 crc kubenswrapper[4838]: I0202 11:18:11.607663 4838 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:12 crc kubenswrapper[4838]: I0202 11:18:12.555808 4838 scope.go:117] "RemoveContainer" containerID="2f31db128fbc1cd7317c380e9346100613922362881f718c9accae28397099a3" Feb 02 11:18:12 crc kubenswrapper[4838]: E0202 11:18:12.556312 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ironic-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ironic-api pod=ironic-7777764cf4-c96ng_openstack(26cd90e1-8974-474a-9c49-387310affd27)\"" pod="openstack/ironic-7777764cf4-c96ng" podUID="26cd90e1-8974-474a-9c49-387310affd27" Feb 02 11:18:15 crc kubenswrapper[4838]: I0202 11:18:15.284012 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ironic-68776656b4-wsgxk" Feb 02 11:18:15 crc kubenswrapper[4838]: I0202 11:18:15.369820 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ironic-7777764cf4-c96ng"] Feb 02 11:18:15 crc kubenswrapper[4838]: I0202 11:18:15.370088 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ironic-7777764cf4-c96ng" podUID="26cd90e1-8974-474a-9c49-387310affd27" containerName="ironic-api-log" containerID="cri-o://7dd96be9d2713d8d2cfb85bf530f3571ba2e89f0d27ef3790fd900be399a10c4" gracePeriod=60 Feb 02 11:18:15 crc kubenswrapper[4838]: I0202 11:18:15.590856 4838 generic.go:334] "Generic (PLEG): container finished" podID="26cd90e1-8974-474a-9c49-387310affd27" containerID="7dd96be9d2713d8d2cfb85bf530f3571ba2e89f0d27ef3790fd900be399a10c4" exitCode=143 Feb 02 11:18:15 crc kubenswrapper[4838]: I0202 11:18:15.590898 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-7777764cf4-c96ng" event={"ID":"26cd90e1-8974-474a-9c49-387310affd27","Type":"ContainerDied","Data":"7dd96be9d2713d8d2cfb85bf530f3571ba2e89f0d27ef3790fd900be399a10c4"} Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.072548 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ironic-inspector-db-sync-s47dm"] Feb 02 11:18:16 crc kubenswrapper[4838]: E0202 11:18:16.073008 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f" containerName="mariadb-database-create" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.073032 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f" containerName="mariadb-database-create" Feb 02 11:18:16 crc kubenswrapper[4838]: E0202 11:18:16.073046 4838 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="9dec3cd0-38f7-4d41-92ad-2e0c8d36e136" containerName="mariadb-account-create-update" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.073053 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dec3cd0-38f7-4d41-92ad-2e0c8d36e136" containerName="mariadb-account-create-update" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.073245 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dec3cd0-38f7-4d41-92ad-2e0c8d36e136" containerName="mariadb-account-create-update" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.073265 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f" containerName="mariadb-database-create" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.074061 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-inspector-db-sync-s47dm" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.077248 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-inspector-config-data" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.077516 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-inspector-scripts" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.132347 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-inspector-db-sync-s47dm"] Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.153447 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-ironic-inspector-dhcp-hostsdir\" (UniqueName: \"kubernetes.io/empty-dir/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-var-lib-ironic-inspector-dhcp-hostsdir\") pod \"ironic-inspector-db-sync-s47dm\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " pod="openstack/ironic-inspector-db-sync-s47dm" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.153504 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-etc-podinfo\") pod \"ironic-inspector-db-sync-s47dm\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " pod="openstack/ironic-inspector-db-sync-s47dm" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.153532 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-combined-ca-bundle\") pod \"ironic-inspector-db-sync-s47dm\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " pod="openstack/ironic-inspector-db-sync-s47dm" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.153577 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-scripts\") pod \"ironic-inspector-db-sync-s47dm\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " pod="openstack/ironic-inspector-db-sync-s47dm" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.153683 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxmdb\" (UniqueName: \"kubernetes.io/projected/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-kube-api-access-rxmdb\") pod \"ironic-inspector-db-sync-s47dm\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " pod="openstack/ironic-inspector-db-sync-s47dm" Feb 02 11:18:16 crc 
kubenswrapper[4838]: I0202 11:18:16.153708 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-config\") pod \"ironic-inspector-db-sync-s47dm\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " pod="openstack/ironic-inspector-db-sync-s47dm" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.153743 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-ironic\" (UniqueName: \"kubernetes.io/empty-dir/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-var-lib-ironic\") pod \"ironic-inspector-db-sync-s47dm\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " pod="openstack/ironic-inspector-db-sync-s47dm" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.257632 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-scripts\") pod \"ironic-inspector-db-sync-s47dm\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " pod="openstack/ironic-inspector-db-sync-s47dm" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.257757 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxmdb\" (UniqueName: \"kubernetes.io/projected/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-kube-api-access-rxmdb\") pod \"ironic-inspector-db-sync-s47dm\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " pod="openstack/ironic-inspector-db-sync-s47dm" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.257786 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-config\") pod \"ironic-inspector-db-sync-s47dm\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " pod="openstack/ironic-inspector-db-sync-s47dm" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.257824 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-ironic\" (UniqueName: \"kubernetes.io/empty-dir/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-var-lib-ironic\") pod \"ironic-inspector-db-sync-s47dm\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " pod="openstack/ironic-inspector-db-sync-s47dm" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.257846 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-ironic-inspector-dhcp-hostsdir\" (UniqueName: \"kubernetes.io/empty-dir/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-var-lib-ironic-inspector-dhcp-hostsdir\") pod \"ironic-inspector-db-sync-s47dm\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " pod="openstack/ironic-inspector-db-sync-s47dm" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.257869 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-etc-podinfo\") pod \"ironic-inspector-db-sync-s47dm\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " pod="openstack/ironic-inspector-db-sync-s47dm" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.257890 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-combined-ca-bundle\") pod \"ironic-inspector-db-sync-s47dm\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " 
pod="openstack/ironic-inspector-db-sync-s47dm" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.258530 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-ironic-inspector-dhcp-hostsdir\" (UniqueName: \"kubernetes.io/empty-dir/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-var-lib-ironic-inspector-dhcp-hostsdir\") pod \"ironic-inspector-db-sync-s47dm\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " pod="openstack/ironic-inspector-db-sync-s47dm" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.264190 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-scripts\") pod \"ironic-inspector-db-sync-s47dm\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " pod="openstack/ironic-inspector-db-sync-s47dm" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.264372 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-config\") pod \"ironic-inspector-db-sync-s47dm\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " pod="openstack/ironic-inspector-db-sync-s47dm" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.265695 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-ironic\" (UniqueName: \"kubernetes.io/empty-dir/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-var-lib-ironic\") pod \"ironic-inspector-db-sync-s47dm\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " pod="openstack/ironic-inspector-db-sync-s47dm" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.271416 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-etc-podinfo\") pod \"ironic-inspector-db-sync-s47dm\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " pod="openstack/ironic-inspector-db-sync-s47dm" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.275776 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxmdb\" (UniqueName: \"kubernetes.io/projected/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-kube-api-access-rxmdb\") pod \"ironic-inspector-db-sync-s47dm\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " pod="openstack/ironic-inspector-db-sync-s47dm" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.282515 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-combined-ca-bundle\") pod \"ironic-inspector-db-sync-s47dm\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " pod="openstack/ironic-inspector-db-sync-s47dm" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.443445 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.445884 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-inspector-db-sync-s47dm" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.566571 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26cd90e1-8974-474a-9c49-387310affd27-config-data\") pod \"26cd90e1-8974-474a-9c49-387310affd27\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.566708 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26cd90e1-8974-474a-9c49-387310affd27-combined-ca-bundle\") pod \"26cd90e1-8974-474a-9c49-387310affd27\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.566834 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/26cd90e1-8974-474a-9c49-387310affd27-etc-podinfo\") pod \"26cd90e1-8974-474a-9c49-387310affd27\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.566860 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/26cd90e1-8974-474a-9c49-387310affd27-config-data-merged\") pod \"26cd90e1-8974-474a-9c49-387310affd27\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.566884 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26cd90e1-8974-474a-9c49-387310affd27-scripts\") pod \"26cd90e1-8974-474a-9c49-387310affd27\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.566906 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/26cd90e1-8974-474a-9c49-387310affd27-logs\") pod \"26cd90e1-8974-474a-9c49-387310affd27\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.566946 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8lq5\" (UniqueName: \"kubernetes.io/projected/26cd90e1-8974-474a-9c49-387310affd27-kube-api-access-q8lq5\") pod \"26cd90e1-8974-474a-9c49-387310affd27\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.567420 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/26cd90e1-8974-474a-9c49-387310affd27-config-data-custom\") pod \"26cd90e1-8974-474a-9c49-387310affd27\" (UID: \"26cd90e1-8974-474a-9c49-387310affd27\") " Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.567714 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/26cd90e1-8974-474a-9c49-387310affd27-config-data-merged" (OuterVolumeSpecName: "config-data-merged") pod "26cd90e1-8974-474a-9c49-387310affd27" (UID: "26cd90e1-8974-474a-9c49-387310affd27"). InnerVolumeSpecName "config-data-merged". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.568742 4838 reconciler_common.go:293] "Volume detached for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/26cd90e1-8974-474a-9c49-387310affd27-config-data-merged\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.569746 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/26cd90e1-8974-474a-9c49-387310affd27-logs" (OuterVolumeSpecName: "logs") pod "26cd90e1-8974-474a-9c49-387310affd27" (UID: "26cd90e1-8974-474a-9c49-387310affd27"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.601878 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-7777764cf4-c96ng" event={"ID":"26cd90e1-8974-474a-9c49-387310affd27","Type":"ContainerDied","Data":"cf91e83c4e96845855a31e13ff3163a3227859f454b55db6b6328f0aac089591"} Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.601945 4838 scope.go:117] "RemoveContainer" containerID="2f31db128fbc1cd7317c380e9346100613922362881f718c9accae28397099a3" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.602070 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-7777764cf4-c96ng" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.646723 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26cd90e1-8974-474a-9c49-387310affd27-scripts" (OuterVolumeSpecName: "scripts") pod "26cd90e1-8974-474a-9c49-387310affd27" (UID: "26cd90e1-8974-474a-9c49-387310affd27"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.647155 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26cd90e1-8974-474a-9c49-387310affd27-kube-api-access-q8lq5" (OuterVolumeSpecName: "kube-api-access-q8lq5") pod "26cd90e1-8974-474a-9c49-387310affd27" (UID: "26cd90e1-8974-474a-9c49-387310affd27"). InnerVolumeSpecName "kube-api-access-q8lq5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.654806 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26cd90e1-8974-474a-9c49-387310affd27-config-data" (OuterVolumeSpecName: "config-data") pod "26cd90e1-8974-474a-9c49-387310affd27" (UID: "26cd90e1-8974-474a-9c49-387310affd27"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.655177 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/26cd90e1-8974-474a-9c49-387310affd27-etc-podinfo" (OuterVolumeSpecName: "etc-podinfo") pod "26cd90e1-8974-474a-9c49-387310affd27" (UID: "26cd90e1-8974-474a-9c49-387310affd27"). InnerVolumeSpecName "etc-podinfo". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.657364 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26cd90e1-8974-474a-9c49-387310affd27-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "26cd90e1-8974-474a-9c49-387310affd27" (UID: "26cd90e1-8974-474a-9c49-387310affd27"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.668152 4838 scope.go:117] "RemoveContainer" containerID="7dd96be9d2713d8d2cfb85bf530f3571ba2e89f0d27ef3790fd900be399a10c4" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.673810 4838 reconciler_common.go:293] "Volume detached for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/26cd90e1-8974-474a-9c49-387310affd27-etc-podinfo\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.673986 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26cd90e1-8974-474a-9c49-387310affd27-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.674065 4838 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/26cd90e1-8974-474a-9c49-387310affd27-logs\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.674130 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8lq5\" (UniqueName: \"kubernetes.io/projected/26cd90e1-8974-474a-9c49-387310affd27-kube-api-access-q8lq5\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.674227 4838 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/26cd90e1-8974-474a-9c49-387310affd27-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.674332 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26cd90e1-8974-474a-9c49-387310affd27-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.696021 4838 scope.go:117] "RemoveContainer" containerID="a7c41d1aa482274ff51cf90b0de7515a71f4f1aa3666daf6013353b7b92cdd3f" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.705679 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26cd90e1-8974-474a-9c49-387310affd27-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "26cd90e1-8974-474a-9c49-387310affd27" (UID: "26cd90e1-8974-474a-9c49-387310affd27"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.777079 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26cd90e1-8974-474a-9c49-387310affd27-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.956578 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ironic-7777764cf4-c96ng"] Feb 02 11:18:16 crc kubenswrapper[4838]: I0202 11:18:16.965005 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ironic-7777764cf4-c96ng"] Feb 02 11:18:17 crc kubenswrapper[4838]: I0202 11:18:17.021779 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-inspector-db-sync-s47dm"] Feb 02 11:18:17 crc kubenswrapper[4838]: W0202 11:18:17.026204 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a115a1d_336b_4c0d_81c4_3ce5c52b05a5.slice/crio-5dba079c547a7e43617308a6ef8253d7eccda695c5d1ec9a97ec9742e2cadddd WatchSource:0}: Error finding container 5dba079c547a7e43617308a6ef8253d7eccda695c5d1ec9a97ec9742e2cadddd: Status 404 returned error can't find the container with id 5dba079c547a7e43617308a6ef8253d7eccda695c5d1ec9a97ec9742e2cadddd Feb 02 11:18:17 crc kubenswrapper[4838]: I0202 11:18:17.612092 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-conductor-0" event={"ID":"3318b8c1-22ca-45c4-a2fd-90205cea5a72","Type":"ContainerStarted","Data":"3406b06c3a34192bae2d8393dbb580f6c4a6574e8346f0d0d815a09a66afb184"} Feb 02 11:18:17 crc kubenswrapper[4838]: I0202 11:18:17.614372 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-db-sync-s47dm" event={"ID":"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5","Type":"ContainerStarted","Data":"5dba079c547a7e43617308a6ef8253d7eccda695c5d1ec9a97ec9742e2cadddd"} Feb 02 11:18:18 crc kubenswrapper[4838]: I0202 11:18:18.515845 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26cd90e1-8974-474a-9c49-387310affd27" path="/var/lib/kubelet/pods/26cd90e1-8974-474a-9c49-387310affd27/volumes" Feb 02 11:18:20 crc kubenswrapper[4838]: I0202 11:18:20.290130 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:18:20 crc kubenswrapper[4838]: I0202 11:18:20.290693 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="97943a79-c8f8-4259-8cb4-1845307b8628" containerName="ceilometer-central-agent" containerID="cri-o://184d0cf646d577a21ee80dc9fec3b85f727789002da9c46224376e606800c9e6" gracePeriod=30 Feb 02 11:18:20 crc kubenswrapper[4838]: I0202 11:18:20.290812 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="97943a79-c8f8-4259-8cb4-1845307b8628" containerName="proxy-httpd" containerID="cri-o://50971dd8e314e1167067a5fa70fe3a935b8b0c2d8007c0a95cb016a6668ea544" gracePeriod=30 Feb 02 11:18:20 crc kubenswrapper[4838]: I0202 11:18:20.290988 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="97943a79-c8f8-4259-8cb4-1845307b8628" containerName="sg-core" containerID="cri-o://f5f8664f861428624e0b00c05b07cf395430f2da744bea26e0edf808c7c6a84f" gracePeriod=30 Feb 02 11:18:20 crc kubenswrapper[4838]: I0202 11:18:20.291039 4838 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/ceilometer-0" podUID="97943a79-c8f8-4259-8cb4-1845307b8628" containerName="ceilometer-notification-agent" containerID="cri-o://72469410a7fe5f56cc8a6650fd2f5a47a253943813bd987d810bded7994e4cf8" gracePeriod=30 Feb 02 11:18:20 crc kubenswrapper[4838]: I0202 11:18:20.307941 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="97943a79-c8f8-4259-8cb4-1845307b8628" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.183:3000/\": EOF" Feb 02 11:18:20 crc kubenswrapper[4838]: I0202 11:18:20.662794 4838 generic.go:334] "Generic (PLEG): container finished" podID="97943a79-c8f8-4259-8cb4-1845307b8628" containerID="50971dd8e314e1167067a5fa70fe3a935b8b0c2d8007c0a95cb016a6668ea544" exitCode=0 Feb 02 11:18:20 crc kubenswrapper[4838]: I0202 11:18:20.663024 4838 generic.go:334] "Generic (PLEG): container finished" podID="97943a79-c8f8-4259-8cb4-1845307b8628" containerID="f5f8664f861428624e0b00c05b07cf395430f2da744bea26e0edf808c7c6a84f" exitCode=2 Feb 02 11:18:20 crc kubenswrapper[4838]: I0202 11:18:20.663083 4838 generic.go:334] "Generic (PLEG): container finished" podID="97943a79-c8f8-4259-8cb4-1845307b8628" containerID="184d0cf646d577a21ee80dc9fec3b85f727789002da9c46224376e606800c9e6" exitCode=0 Feb 02 11:18:20 crc kubenswrapper[4838]: I0202 11:18:20.662876 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97943a79-c8f8-4259-8cb4-1845307b8628","Type":"ContainerDied","Data":"50971dd8e314e1167067a5fa70fe3a935b8b0c2d8007c0a95cb016a6668ea544"} Feb 02 11:18:20 crc kubenswrapper[4838]: I0202 11:18:20.663211 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97943a79-c8f8-4259-8cb4-1845307b8628","Type":"ContainerDied","Data":"f5f8664f861428624e0b00c05b07cf395430f2da744bea26e0edf808c7c6a84f"} Feb 02 11:18:20 crc kubenswrapper[4838]: I0202 11:18:20.663302 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97943a79-c8f8-4259-8cb4-1845307b8628","Type":"ContainerDied","Data":"184d0cf646d577a21ee80dc9fec3b85f727789002da9c46224376e606800c9e6"} Feb 02 11:18:20 crc kubenswrapper[4838]: I0202 11:18:20.909564 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="97943a79-c8f8-4259-8cb4-1845307b8628" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.183:3000/\": dial tcp 10.217.0.183:3000: connect: connection refused" Feb 02 11:18:21 crc kubenswrapper[4838]: I0202 11:18:21.679722 4838 generic.go:334] "Generic (PLEG): container finished" podID="3318b8c1-22ca-45c4-a2fd-90205cea5a72" containerID="3406b06c3a34192bae2d8393dbb580f6c4a6574e8346f0d0d815a09a66afb184" exitCode=0 Feb 02 11:18:21 crc kubenswrapper[4838]: I0202 11:18:21.679778 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-conductor-0" event={"ID":"3318b8c1-22ca-45c4-a2fd-90205cea5a72","Type":"ContainerDied","Data":"3406b06c3a34192bae2d8393dbb580f6c4a6574e8346f0d0d815a09a66afb184"} Feb 02 11:18:22 crc kubenswrapper[4838]: I0202 11:18:22.688866 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-db-sync-s47dm" event={"ID":"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5","Type":"ContainerStarted","Data":"972ee48d93289c4265f7af6fcfe273ad08f49a74e4bfd894462152685b79a3dd"} Feb 02 11:18:22 crc kubenswrapper[4838]: I0202 11:18:22.714836 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/ironic-inspector-db-sync-s47dm" podStartSLOduration=2.100271429 podStartE2EDuration="6.714816172s" podCreationTimestamp="2026-02-02 11:18:16 +0000 UTC" firstStartedPulling="2026-02-02 11:18:17.028952694 +0000 UTC m=+1491.366053722" lastFinishedPulling="2026-02-02 11:18:21.643497427 +0000 UTC m=+1495.980598465" observedRunningTime="2026-02-02 11:18:22.707457888 +0000 UTC m=+1497.044558956" watchObservedRunningTime="2026-02-02 11:18:22.714816172 +0000 UTC m=+1497.051917210" Feb 02 11:18:25 crc kubenswrapper[4838]: I0202 11:18:25.728724 4838 generic.go:334] "Generic (PLEG): container finished" podID="97943a79-c8f8-4259-8cb4-1845307b8628" containerID="72469410a7fe5f56cc8a6650fd2f5a47a253943813bd987d810bded7994e4cf8" exitCode=0 Feb 02 11:18:25 crc kubenswrapper[4838]: I0202 11:18:25.728935 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97943a79-c8f8-4259-8cb4-1845307b8628","Type":"ContainerDied","Data":"72469410a7fe5f56cc8a6650fd2f5a47a253943813bd987d810bded7994e4cf8"} Feb 02 11:18:34 crc kubenswrapper[4838]: I0202 11:18:34.925397 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.029860 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/97943a79-c8f8-4259-8cb4-1845307b8628-run-httpd\") pod \"97943a79-c8f8-4259-8cb4-1845307b8628\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.030082 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-sg-core-conf-yaml\") pod \"97943a79-c8f8-4259-8cb4-1845307b8628\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.030176 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-scripts\") pod \"97943a79-c8f8-4259-8cb4-1845307b8628\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.030214 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-ceilometer-tls-certs\") pod \"97943a79-c8f8-4259-8cb4-1845307b8628\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.030253 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-config-data\") pod \"97943a79-c8f8-4259-8cb4-1845307b8628\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.030312 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7j85p\" (UniqueName: \"kubernetes.io/projected/97943a79-c8f8-4259-8cb4-1845307b8628-kube-api-access-7j85p\") pod \"97943a79-c8f8-4259-8cb4-1845307b8628\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.030384 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-combined-ca-bundle\") pod \"97943a79-c8f8-4259-8cb4-1845307b8628\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.030422 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/97943a79-c8f8-4259-8cb4-1845307b8628-log-httpd\") pod \"97943a79-c8f8-4259-8cb4-1845307b8628\" (UID: \"97943a79-c8f8-4259-8cb4-1845307b8628\") " Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.030904 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97943a79-c8f8-4259-8cb4-1845307b8628-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "97943a79-c8f8-4259-8cb4-1845307b8628" (UID: "97943a79-c8f8-4259-8cb4-1845307b8628"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.031081 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97943a79-c8f8-4259-8cb4-1845307b8628-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "97943a79-c8f8-4259-8cb4-1845307b8628" (UID: "97943a79-c8f8-4259-8cb4-1845307b8628"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.031331 4838 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/97943a79-c8f8-4259-8cb4-1845307b8628-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.031356 4838 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/97943a79-c8f8-4259-8cb4-1845307b8628-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.035802 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97943a79-c8f8-4259-8cb4-1845307b8628-kube-api-access-7j85p" (OuterVolumeSpecName: "kube-api-access-7j85p") pod "97943a79-c8f8-4259-8cb4-1845307b8628" (UID: "97943a79-c8f8-4259-8cb4-1845307b8628"). InnerVolumeSpecName "kube-api-access-7j85p". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.047481 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-scripts" (OuterVolumeSpecName: "scripts") pod "97943a79-c8f8-4259-8cb4-1845307b8628" (UID: "97943a79-c8f8-4259-8cb4-1845307b8628"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.065337 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "97943a79-c8f8-4259-8cb4-1845307b8628" (UID: "97943a79-c8f8-4259-8cb4-1845307b8628"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.082975 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "97943a79-c8f8-4259-8cb4-1845307b8628" (UID: "97943a79-c8f8-4259-8cb4-1845307b8628"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.121340 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "97943a79-c8f8-4259-8cb4-1845307b8628" (UID: "97943a79-c8f8-4259-8cb4-1845307b8628"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.132842 4838 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.132891 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.132903 4838 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.132916 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7j85p\" (UniqueName: \"kubernetes.io/projected/97943a79-c8f8-4259-8cb4-1845307b8628-kube-api-access-7j85p\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.132930 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.330129 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-config-data" (OuterVolumeSpecName: "config-data") pod "97943a79-c8f8-4259-8cb4-1845307b8628" (UID: "97943a79-c8f8-4259-8cb4-1845307b8628"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.337251 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97943a79-c8f8-4259-8cb4-1845307b8628-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:35 crc kubenswrapper[4838]: E0202 11:18:35.539140 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ironic-pxe:current-podified" Feb 02 11:18:35 crc kubenswrapper[4838]: E0202 11:18:35.539306 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:pxe-init,Image:quay.io/podified-antelope-centos9/openstack-ironic-pxe:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/container-scripts/pxe-init.sh],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:IronicPassword,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:osp-secret,},Key:IronicPassword,Optional:nil,},},},EnvVar{Name:PodName,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:PodNamespace,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:ProvisionNetwork,Value:,ValueFrom:nil,},EnvVar{Name:DatabaseHost,Value:openstack.openstack.svc,ValueFrom:nil,},EnvVar{Name:DatabaseName,Value:ironic,ValueFrom:nil,},EnvVar{Name:DeployHTTPURL,Value:http://%(PodName)s-%(PodNamespace)s.%(IngressDomain)s/,ValueFrom:nil,},EnvVar{Name:IngressDomain,Value:apps-crc.testing,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-merged,ReadOnly:false,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:etc-podinfo,ReadOnly:false,MountPath:/etc/podinfo,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-lib-ironic,ReadOnly:false,MountPath:/var/lib/ironic,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-custom,ReadOnly:true,MountPath:/var/lib/config-data/custom,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wkk8v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesyst
em:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-conductor-0_openstack(3318b8c1-22ca-45c4-a2fd-90205cea5a72): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 11:18:35 crc kubenswrapper[4838]: E0202 11:18:35.540891 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pxe-init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ironic-conductor-0" podUID="3318b8c1-22ca-45c4-a2fd-90205cea5a72" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.826421 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97943a79-c8f8-4259-8cb4-1845307b8628","Type":"ContainerDied","Data":"cb0a57b9db10be48e1c47346667e71e318241cbfe536a72cc1d3de8a1eaf6d9e"} Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.826493 4838 scope.go:117] "RemoveContainer" containerID="50971dd8e314e1167067a5fa70fe3a935b8b0c2d8007c0a95cb016a6668ea544" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.826516 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.850951 4838 scope.go:117] "RemoveContainer" containerID="f5f8664f861428624e0b00c05b07cf395430f2da744bea26e0edf808c7c6a84f" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.913516 4838 scope.go:117] "RemoveContainer" containerID="72469410a7fe5f56cc8a6650fd2f5a47a253943813bd987d810bded7994e4cf8" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.916554 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.927398 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.937547 4838 scope.go:117] "RemoveContainer" containerID="184d0cf646d577a21ee80dc9fec3b85f727789002da9c46224376e606800c9e6" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.943316 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:18:35 crc kubenswrapper[4838]: E0202 11:18:35.943730 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26cd90e1-8974-474a-9c49-387310affd27" containerName="ironic-api-log" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.943754 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="26cd90e1-8974-474a-9c49-387310affd27" containerName="ironic-api-log" Feb 02 11:18:35 crc kubenswrapper[4838]: E0202 11:18:35.943795 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97943a79-c8f8-4259-8cb4-1845307b8628" containerName="sg-core" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.943803 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="97943a79-c8f8-4259-8cb4-1845307b8628" containerName="sg-core" Feb 02 11:18:35 crc kubenswrapper[4838]: E0202 11:18:35.943811 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97943a79-c8f8-4259-8cb4-1845307b8628" containerName="ceilometer-notification-agent" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.943818 4838 
state_mem.go:107] "Deleted CPUSet assignment" podUID="97943a79-c8f8-4259-8cb4-1845307b8628" containerName="ceilometer-notification-agent" Feb 02 11:18:35 crc kubenswrapper[4838]: E0202 11:18:35.943830 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97943a79-c8f8-4259-8cb4-1845307b8628" containerName="ceilometer-central-agent" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.943850 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="97943a79-c8f8-4259-8cb4-1845307b8628" containerName="ceilometer-central-agent" Feb 02 11:18:35 crc kubenswrapper[4838]: E0202 11:18:35.943867 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26cd90e1-8974-474a-9c49-387310affd27" containerName="ironic-api" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.943874 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="26cd90e1-8974-474a-9c49-387310affd27" containerName="ironic-api" Feb 02 11:18:35 crc kubenswrapper[4838]: E0202 11:18:35.943886 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26cd90e1-8974-474a-9c49-387310affd27" containerName="init" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.943899 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="26cd90e1-8974-474a-9c49-387310affd27" containerName="init" Feb 02 11:18:35 crc kubenswrapper[4838]: E0202 11:18:35.943910 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97943a79-c8f8-4259-8cb4-1845307b8628" containerName="proxy-httpd" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.943919 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="97943a79-c8f8-4259-8cb4-1845307b8628" containerName="proxy-httpd" Feb 02 11:18:35 crc kubenswrapper[4838]: E0202 11:18:35.943930 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26cd90e1-8974-474a-9c49-387310affd27" containerName="ironic-api" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.943936 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="26cd90e1-8974-474a-9c49-387310affd27" containerName="ironic-api" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.944106 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="26cd90e1-8974-474a-9c49-387310affd27" containerName="ironic-api" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.944125 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="97943a79-c8f8-4259-8cb4-1845307b8628" containerName="ceilometer-notification-agent" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.944135 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="97943a79-c8f8-4259-8cb4-1845307b8628" containerName="ceilometer-central-agent" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.944146 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="97943a79-c8f8-4259-8cb4-1845307b8628" containerName="sg-core" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.944157 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="26cd90e1-8974-474a-9c49-387310affd27" containerName="ironic-api-log" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.944170 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="97943a79-c8f8-4259-8cb4-1845307b8628" containerName="proxy-httpd" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.944474 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="26cd90e1-8974-474a-9c49-387310affd27" containerName="ironic-api" Feb 02 11:18:35 crc kubenswrapper[4838]: 
I0202 11:18:35.945813 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.948946 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.949193 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.950814 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 11:18:35 crc kubenswrapper[4838]: I0202 11:18:35.972043 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.049601 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " pod="openstack/ceilometer-0" Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.049681 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " pod="openstack/ceilometer-0" Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.049709 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2x65g\" (UniqueName: \"kubernetes.io/projected/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-kube-api-access-2x65g\") pod \"ceilometer-0\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " pod="openstack/ceilometer-0" Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.049731 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-run-httpd\") pod \"ceilometer-0\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " pod="openstack/ceilometer-0" Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.049753 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-log-httpd\") pod \"ceilometer-0\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " pod="openstack/ceilometer-0" Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.049776 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " pod="openstack/ceilometer-0" Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.049806 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-scripts\") pod \"ceilometer-0\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " pod="openstack/ceilometer-0" Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.049866 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-config-data\") pod \"ceilometer-0\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " pod="openstack/ceilometer-0" Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.159902 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2x65g\" (UniqueName: \"kubernetes.io/projected/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-kube-api-access-2x65g\") pod \"ceilometer-0\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " pod="openstack/ceilometer-0" Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.159951 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-run-httpd\") pod \"ceilometer-0\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " pod="openstack/ceilometer-0" Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.159980 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-log-httpd\") pod \"ceilometer-0\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " pod="openstack/ceilometer-0" Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.160006 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " pod="openstack/ceilometer-0" Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.160041 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-scripts\") pod \"ceilometer-0\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " pod="openstack/ceilometer-0" Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.160105 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-config-data\") pod \"ceilometer-0\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " pod="openstack/ceilometer-0" Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.160139 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " pod="openstack/ceilometer-0" Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.160171 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " pod="openstack/ceilometer-0" Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.160965 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-log-httpd\") pod \"ceilometer-0\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " pod="openstack/ceilometer-0" Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.161638 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" 
(UniqueName: \"kubernetes.io/empty-dir/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-run-httpd\") pod \"ceilometer-0\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " pod="openstack/ceilometer-0" Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.166575 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " pod="openstack/ceilometer-0" Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.170381 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-scripts\") pod \"ceilometer-0\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " pod="openstack/ceilometer-0" Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.178245 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " pod="openstack/ceilometer-0" Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.178422 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-config-data\") pod \"ceilometer-0\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " pod="openstack/ceilometer-0" Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.182312 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2x65g\" (UniqueName: \"kubernetes.io/projected/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-kube-api-access-2x65g\") pod \"ceilometer-0\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " pod="openstack/ceilometer-0" Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.187619 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " pod="openstack/ceilometer-0" Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.266705 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.521603 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97943a79-c8f8-4259-8cb4-1845307b8628" path="/var/lib/kubelet/pods/97943a79-c8f8-4259-8cb4-1845307b8628/volumes" Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.791692 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:18:36 crc kubenswrapper[4838]: W0202 11:18:36.801013 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7bc80d3e_05b1_4bfa_9c94_3d7c162420aa.slice/crio-4e82ad6f3d4ffc36061a30dfbb2eb66d097176ff8e1b190b3af10bccb7407012 WatchSource:0}: Error finding container 4e82ad6f3d4ffc36061a30dfbb2eb66d097176ff8e1b190b3af10bccb7407012: Status 404 returned error can't find the container with id 4e82ad6f3d4ffc36061a30dfbb2eb66d097176ff8e1b190b3af10bccb7407012 Feb 02 11:18:36 crc kubenswrapper[4838]: I0202 11:18:36.837012 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa","Type":"ContainerStarted","Data":"4e82ad6f3d4ffc36061a30dfbb2eb66d097176ff8e1b190b3af10bccb7407012"} Feb 02 11:18:40 crc kubenswrapper[4838]: I0202 11:18:40.877079 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa","Type":"ContainerStarted","Data":"81d235f814a1853993fc8a93e5e2fcd2d6f606b6a9168da6972bf64ba8ac9164"} Feb 02 11:18:41 crc kubenswrapper[4838]: I0202 11:18:41.889978 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa","Type":"ContainerStarted","Data":"b78a230bd6b7841ac2d8cc3e207e9220466f64b3d78b9e04be7d93d07cfbc61a"} Feb 02 11:18:43 crc kubenswrapper[4838]: I0202 11:18:43.917502 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa","Type":"ContainerStarted","Data":"18cc5dfadc68883ae64300002aa4f2a2b79b50fd4225429116d760319e35826d"} Feb 02 11:18:45 crc kubenswrapper[4838]: I0202 11:18:45.796228 4838 scope.go:117] "RemoveContainer" containerID="fca4963b9b40891c5fe798ef2c5fac1b8bc291516f36c0418b0a8d6d17c4014a" Feb 02 11:18:46 crc kubenswrapper[4838]: I0202 11:18:46.953075 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa","Type":"ContainerStarted","Data":"d7d5343b65a57e6bc9243a2d381186e1b1cc09829c85f0e0377e8038c6d5b217"} Feb 02 11:18:46 crc kubenswrapper[4838]: I0202 11:18:46.953954 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 11:18:46 crc kubenswrapper[4838]: I0202 11:18:46.989048 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.712664817 podStartE2EDuration="11.989029092s" podCreationTimestamp="2026-02-02 11:18:35 +0000 UTC" firstStartedPulling="2026-02-02 11:18:36.808276956 +0000 UTC m=+1511.145377984" lastFinishedPulling="2026-02-02 11:18:46.084641221 +0000 UTC m=+1520.421742259" observedRunningTime="2026-02-02 11:18:46.984800851 +0000 UTC m=+1521.321901889" watchObservedRunningTime="2026-02-02 11:18:46.989029092 +0000 UTC m=+1521.326130120" Feb 02 11:18:48 crc kubenswrapper[4838]: I0202 11:18:48.969955 4838 generic.go:334] "Generic (PLEG): container 
finished" podID="1a115a1d-336b-4c0d-81c4-3ce5c52b05a5" containerID="972ee48d93289c4265f7af6fcfe273ad08f49a74e4bfd894462152685b79a3dd" exitCode=0 Feb 02 11:18:48 crc kubenswrapper[4838]: I0202 11:18:48.970261 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-db-sync-s47dm" event={"ID":"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5","Type":"ContainerDied","Data":"972ee48d93289c4265f7af6fcfe273ad08f49a74e4bfd894462152685b79a3dd"} Feb 02 11:18:48 crc kubenswrapper[4838]: I0202 11:18:48.972590 4838 generic.go:334] "Generic (PLEG): container finished" podID="948b1ebc-d2a2-4e7b-aa2e-d215bbae015d" containerID="5d5e907869030a55ad318e1d933bff3c790413853b7dba2978c0d1386f6b4b5c" exitCode=0 Feb 02 11:18:48 crc kubenswrapper[4838]: I0202 11:18:48.972646 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-4r9v4" event={"ID":"948b1ebc-d2a2-4e7b-aa2e-d215bbae015d","Type":"ContainerDied","Data":"5d5e907869030a55ad318e1d933bff3c790413853b7dba2978c0d1386f6b4b5c"} Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.416465 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-4r9v4" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.423660 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-inspector-db-sync-s47dm" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.524482 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/948b1ebc-d2a2-4e7b-aa2e-d215bbae015d-scripts\") pod \"948b1ebc-d2a2-4e7b-aa2e-d215bbae015d\" (UID: \"948b1ebc-d2a2-4e7b-aa2e-d215bbae015d\") " Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.524537 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/948b1ebc-d2a2-4e7b-aa2e-d215bbae015d-combined-ca-bundle\") pod \"948b1ebc-d2a2-4e7b-aa2e-d215bbae015d\" (UID: \"948b1ebc-d2a2-4e7b-aa2e-d215bbae015d\") " Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.525927 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/948b1ebc-d2a2-4e7b-aa2e-d215bbae015d-config-data\") pod \"948b1ebc-d2a2-4e7b-aa2e-d215bbae015d\" (UID: \"948b1ebc-d2a2-4e7b-aa2e-d215bbae015d\") " Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.528689 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f4s2d\" (UniqueName: \"kubernetes.io/projected/948b1ebc-d2a2-4e7b-aa2e-d215bbae015d-kube-api-access-f4s2d\") pod \"948b1ebc-d2a2-4e7b-aa2e-d215bbae015d\" (UID: \"948b1ebc-d2a2-4e7b-aa2e-d215bbae015d\") " Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.530792 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/948b1ebc-d2a2-4e7b-aa2e-d215bbae015d-scripts" (OuterVolumeSpecName: "scripts") pod "948b1ebc-d2a2-4e7b-aa2e-d215bbae015d" (UID: "948b1ebc-d2a2-4e7b-aa2e-d215bbae015d"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.532434 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/948b1ebc-d2a2-4e7b-aa2e-d215bbae015d-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.534344 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/948b1ebc-d2a2-4e7b-aa2e-d215bbae015d-kube-api-access-f4s2d" (OuterVolumeSpecName: "kube-api-access-f4s2d") pod "948b1ebc-d2a2-4e7b-aa2e-d215bbae015d" (UID: "948b1ebc-d2a2-4e7b-aa2e-d215bbae015d"). InnerVolumeSpecName "kube-api-access-f4s2d". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.560733 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/948b1ebc-d2a2-4e7b-aa2e-d215bbae015d-config-data" (OuterVolumeSpecName: "config-data") pod "948b1ebc-d2a2-4e7b-aa2e-d215bbae015d" (UID: "948b1ebc-d2a2-4e7b-aa2e-d215bbae015d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.564835 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/948b1ebc-d2a2-4e7b-aa2e-d215bbae015d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "948b1ebc-d2a2-4e7b-aa2e-d215bbae015d" (UID: "948b1ebc-d2a2-4e7b-aa2e-d215bbae015d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.633523 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-ironic\" (UniqueName: \"kubernetes.io/empty-dir/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-var-lib-ironic\") pod \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.633781 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-ironic-inspector-dhcp-hostsdir\" (UniqueName: \"kubernetes.io/empty-dir/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-var-lib-ironic-inspector-dhcp-hostsdir\") pod \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.633836 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxmdb\" (UniqueName: \"kubernetes.io/projected/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-kube-api-access-rxmdb\") pod \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.633885 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-var-lib-ironic" (OuterVolumeSpecName: "var-lib-ironic") pod "1a115a1d-336b-4c0d-81c4-3ce5c52b05a5" (UID: "1a115a1d-336b-4c0d-81c4-3ce5c52b05a5"). InnerVolumeSpecName "var-lib-ironic". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.633898 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-combined-ca-bundle\") pod \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.634059 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-etc-podinfo\") pod \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.634098 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-config\") pod \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.634190 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-scripts\") pod \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\" (UID: \"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5\") " Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.634226 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-var-lib-ironic-inspector-dhcp-hostsdir" (OuterVolumeSpecName: "var-lib-ironic-inspector-dhcp-hostsdir") pod "1a115a1d-336b-4c0d-81c4-3ce5c52b05a5" (UID: "1a115a1d-336b-4c0d-81c4-3ce5c52b05a5"). InnerVolumeSpecName "var-lib-ironic-inspector-dhcp-hostsdir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.634919 4838 reconciler_common.go:293] "Volume detached for volume \"var-lib-ironic-inspector-dhcp-hostsdir\" (UniqueName: \"kubernetes.io/empty-dir/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-var-lib-ironic-inspector-dhcp-hostsdir\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.634945 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/948b1ebc-d2a2-4e7b-aa2e-d215bbae015d-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.634955 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f4s2d\" (UniqueName: \"kubernetes.io/projected/948b1ebc-d2a2-4e7b-aa2e-d215bbae015d-kube-api-access-f4s2d\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.634964 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/948b1ebc-d2a2-4e7b-aa2e-d215bbae015d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.634972 4838 reconciler_common.go:293] "Volume detached for volume \"var-lib-ironic\" (UniqueName: \"kubernetes.io/empty-dir/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-var-lib-ironic\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.636719 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-kube-api-access-rxmdb" (OuterVolumeSpecName: "kube-api-access-rxmdb") pod "1a115a1d-336b-4c0d-81c4-3ce5c52b05a5" (UID: "1a115a1d-336b-4c0d-81c4-3ce5c52b05a5"). InnerVolumeSpecName "kube-api-access-rxmdb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.637231 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-scripts" (OuterVolumeSpecName: "scripts") pod "1a115a1d-336b-4c0d-81c4-3ce5c52b05a5" (UID: "1a115a1d-336b-4c0d-81c4-3ce5c52b05a5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.638778 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-etc-podinfo" (OuterVolumeSpecName: "etc-podinfo") pod "1a115a1d-336b-4c0d-81c4-3ce5c52b05a5" (UID: "1a115a1d-336b-4c0d-81c4-3ce5c52b05a5"). InnerVolumeSpecName "etc-podinfo". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.658936 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1a115a1d-336b-4c0d-81c4-3ce5c52b05a5" (UID: "1a115a1d-336b-4c0d-81c4-3ce5c52b05a5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.658959 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-config" (OuterVolumeSpecName: "config") pod "1a115a1d-336b-4c0d-81c4-3ce5c52b05a5" (UID: "1a115a1d-336b-4c0d-81c4-3ce5c52b05a5"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.736803 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxmdb\" (UniqueName: \"kubernetes.io/projected/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-kube-api-access-rxmdb\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.736851 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.736861 4838 reconciler_common.go:293] "Volume detached for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-etc-podinfo\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.736871 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-config\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.736880 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a115a1d-336b-4c0d-81c4-3ce5c52b05a5-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.989823 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-inspector-db-sync-s47dm" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.990069 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-db-sync-s47dm" event={"ID":"1a115a1d-336b-4c0d-81c4-3ce5c52b05a5","Type":"ContainerDied","Data":"5dba079c547a7e43617308a6ef8253d7eccda695c5d1ec9a97ec9742e2cadddd"} Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.990104 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5dba079c547a7e43617308a6ef8253d7eccda695c5d1ec9a97ec9742e2cadddd" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.991306 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-4r9v4" event={"ID":"948b1ebc-d2a2-4e7b-aa2e-d215bbae015d","Type":"ContainerDied","Data":"9c13c41d0101d379bcf38fcbee927c7361193071fe464a0f57193d12f6a2d1de"} Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.991342 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9c13c41d0101d379bcf38fcbee927c7361193071fe464a0f57193d12f6a2d1de" Feb 02 11:18:50 crc kubenswrapper[4838]: I0202 11:18:50.991391 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-4r9v4" Feb 02 11:18:51 crc kubenswrapper[4838]: I0202 11:18:51.113862 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 02 11:18:51 crc kubenswrapper[4838]: E0202 11:18:51.115767 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a115a1d-336b-4c0d-81c4-3ce5c52b05a5" containerName="ironic-inspector-db-sync" Feb 02 11:18:51 crc kubenswrapper[4838]: I0202 11:18:51.115884 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a115a1d-336b-4c0d-81c4-3ce5c52b05a5" containerName="ironic-inspector-db-sync" Feb 02 11:18:51 crc kubenswrapper[4838]: E0202 11:18:51.115984 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="948b1ebc-d2a2-4e7b-aa2e-d215bbae015d" containerName="nova-cell0-conductor-db-sync" Feb 02 11:18:51 crc kubenswrapper[4838]: I0202 11:18:51.116095 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="948b1ebc-d2a2-4e7b-aa2e-d215bbae015d" containerName="nova-cell0-conductor-db-sync" Feb 02 11:18:51 crc kubenswrapper[4838]: I0202 11:18:51.116413 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a115a1d-336b-4c0d-81c4-3ce5c52b05a5" containerName="ironic-inspector-db-sync" Feb 02 11:18:51 crc kubenswrapper[4838]: I0202 11:18:51.118997 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="948b1ebc-d2a2-4e7b-aa2e-d215bbae015d" containerName="nova-cell0-conductor-db-sync" Feb 02 11:18:51 crc kubenswrapper[4838]: I0202 11:18:51.119935 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 02 11:18:51 crc kubenswrapper[4838]: I0202 11:18:51.122335 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Feb 02 11:18:51 crc kubenswrapper[4838]: I0202 11:18:51.122578 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-5x2vp" Feb 02 11:18:51 crc kubenswrapper[4838]: I0202 11:18:51.127420 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 02 11:18:51 crc kubenswrapper[4838]: I0202 11:18:51.246729 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsrjb\" (UniqueName: \"kubernetes.io/projected/c15c4499-e01e-474c-b653-efaf4af4c881-kube-api-access-qsrjb\") pod \"nova-cell0-conductor-0\" (UID: \"c15c4499-e01e-474c-b653-efaf4af4c881\") " pod="openstack/nova-cell0-conductor-0" Feb 02 11:18:51 crc kubenswrapper[4838]: I0202 11:18:51.247039 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c15c4499-e01e-474c-b653-efaf4af4c881-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"c15c4499-e01e-474c-b653-efaf4af4c881\") " pod="openstack/nova-cell0-conductor-0" Feb 02 11:18:51 crc kubenswrapper[4838]: I0202 11:18:51.247092 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c15c4499-e01e-474c-b653-efaf4af4c881-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"c15c4499-e01e-474c-b653-efaf4af4c881\") " pod="openstack/nova-cell0-conductor-0" Feb 02 11:18:51 crc kubenswrapper[4838]: I0202 11:18:51.354561 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qsrjb\" 
(UniqueName: \"kubernetes.io/projected/c15c4499-e01e-474c-b653-efaf4af4c881-kube-api-access-qsrjb\") pod \"nova-cell0-conductor-0\" (UID: \"c15c4499-e01e-474c-b653-efaf4af4c881\") " pod="openstack/nova-cell0-conductor-0" Feb 02 11:18:51 crc kubenswrapper[4838]: I0202 11:18:51.354610 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c15c4499-e01e-474c-b653-efaf4af4c881-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"c15c4499-e01e-474c-b653-efaf4af4c881\") " pod="openstack/nova-cell0-conductor-0" Feb 02 11:18:51 crc kubenswrapper[4838]: I0202 11:18:51.355113 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c15c4499-e01e-474c-b653-efaf4af4c881-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"c15c4499-e01e-474c-b653-efaf4af4c881\") " pod="openstack/nova-cell0-conductor-0" Feb 02 11:18:51 crc kubenswrapper[4838]: I0202 11:18:51.359313 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c15c4499-e01e-474c-b653-efaf4af4c881-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"c15c4499-e01e-474c-b653-efaf4af4c881\") " pod="openstack/nova-cell0-conductor-0" Feb 02 11:18:51 crc kubenswrapper[4838]: I0202 11:18:51.364267 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c15c4499-e01e-474c-b653-efaf4af4c881-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"c15c4499-e01e-474c-b653-efaf4af4c881\") " pod="openstack/nova-cell0-conductor-0" Feb 02 11:18:51 crc kubenswrapper[4838]: I0202 11:18:51.375989 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qsrjb\" (UniqueName: \"kubernetes.io/projected/c15c4499-e01e-474c-b653-efaf4af4c881-kube-api-access-qsrjb\") pod \"nova-cell0-conductor-0\" (UID: \"c15c4499-e01e-474c-b653-efaf4af4c881\") " pod="openstack/nova-cell0-conductor-0" Feb 02 11:18:51 crc kubenswrapper[4838]: I0202 11:18:51.448735 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 02 11:18:51 crc kubenswrapper[4838]: W0202 11:18:51.884817 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc15c4499_e01e_474c_b653_efaf4af4c881.slice/crio-c7b210765ba5020ed5046659450c59367e9a0045e3dd03e1b0081661fd65dfe2 WatchSource:0}: Error finding container c7b210765ba5020ed5046659450c59367e9a0045e3dd03e1b0081661fd65dfe2: Status 404 returned error can't find the container with id c7b210765ba5020ed5046659450c59367e9a0045e3dd03e1b0081661fd65dfe2 Feb 02 11:18:51 crc kubenswrapper[4838]: I0202 11:18:51.884856 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.002737 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"c15c4499-e01e-474c-b653-efaf4af4c881","Type":"ContainerStarted","Data":"c7b210765ba5020ed5046659450c59367e9a0045e3dd03e1b0081661fd65dfe2"} Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.008785 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-conductor-0" event={"ID":"3318b8c1-22ca-45c4-a2fd-90205cea5a72","Type":"ContainerStarted","Data":"f25a2284d2dde9d906c1e133341c3751f6f15de2b2d4c39c678b0d2acb325e90"} Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.385158 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ironic-inspector-0"] Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.389367 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-inspector-0" Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.392420 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-inspector-config-data" Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.392706 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-inspector-scripts" Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.406153 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-inspector-0"] Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.588965 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmgzp\" (UniqueName: \"kubernetes.io/projected/d8ff2cf0-060c-40bd-8fad-021193615a79-kube-api-access-kmgzp\") pod \"ironic-inspector-0\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.589167 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/d8ff2cf0-060c-40bd-8fad-021193615a79-etc-podinfo\") pod \"ironic-inspector-0\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.589261 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8ff2cf0-060c-40bd-8fad-021193615a79-combined-ca-bundle\") pod \"ironic-inspector-0\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.589301 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/d8ff2cf0-060c-40bd-8fad-021193615a79-scripts\") pod \"ironic-inspector-0\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.589339 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-ironic\" (UniqueName: \"kubernetes.io/empty-dir/d8ff2cf0-060c-40bd-8fad-021193615a79-var-lib-ironic\") pod \"ironic-inspector-0\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.590296 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d8ff2cf0-060c-40bd-8fad-021193615a79-config\") pod \"ironic-inspector-0\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.590342 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-ironic-inspector-dhcp-hostsdir\" (UniqueName: \"kubernetes.io/empty-dir/d8ff2cf0-060c-40bd-8fad-021193615a79-var-lib-ironic-inspector-dhcp-hostsdir\") pod \"ironic-inspector-0\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.691665 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8ff2cf0-060c-40bd-8fad-021193615a79-combined-ca-bundle\") pod \"ironic-inspector-0\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.691715 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8ff2cf0-060c-40bd-8fad-021193615a79-scripts\") pod \"ironic-inspector-0\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.691746 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-ironic\" (UniqueName: \"kubernetes.io/empty-dir/d8ff2cf0-060c-40bd-8fad-021193615a79-var-lib-ironic\") pod \"ironic-inspector-0\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.691768 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d8ff2cf0-060c-40bd-8fad-021193615a79-config\") pod \"ironic-inspector-0\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.691788 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-ironic-inspector-dhcp-hostsdir\" (UniqueName: \"kubernetes.io/empty-dir/d8ff2cf0-060c-40bd-8fad-021193615a79-var-lib-ironic-inspector-dhcp-hostsdir\") pod \"ironic-inspector-0\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.691861 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmgzp\" (UniqueName: \"kubernetes.io/projected/d8ff2cf0-060c-40bd-8fad-021193615a79-kube-api-access-kmgzp\") pod \"ironic-inspector-0\" (UID: 
\"d8ff2cf0-060c-40bd-8fad-021193615a79\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.691958 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/d8ff2cf0-060c-40bd-8fad-021193615a79-etc-podinfo\") pod \"ironic-inspector-0\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.692263 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-ironic\" (UniqueName: \"kubernetes.io/empty-dir/d8ff2cf0-060c-40bd-8fad-021193615a79-var-lib-ironic\") pod \"ironic-inspector-0\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.692659 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-ironic-inspector-dhcp-hostsdir\" (UniqueName: \"kubernetes.io/empty-dir/d8ff2cf0-060c-40bd-8fad-021193615a79-var-lib-ironic-inspector-dhcp-hostsdir\") pod \"ironic-inspector-0\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.697647 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/d8ff2cf0-060c-40bd-8fad-021193615a79-config\") pod \"ironic-inspector-0\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.698685 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8ff2cf0-060c-40bd-8fad-021193615a79-scripts\") pod \"ironic-inspector-0\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.700097 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8ff2cf0-060c-40bd-8fad-021193615a79-combined-ca-bundle\") pod \"ironic-inspector-0\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.714442 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/d8ff2cf0-060c-40bd-8fad-021193615a79-etc-podinfo\") pod \"ironic-inspector-0\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:52 crc kubenswrapper[4838]: I0202 11:18:52.719173 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmgzp\" (UniqueName: \"kubernetes.io/projected/d8ff2cf0-060c-40bd-8fad-021193615a79-kube-api-access-kmgzp\") pod \"ironic-inspector-0\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:53 crc kubenswrapper[4838]: I0202 11:18:53.022509 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-inspector-0" Feb 02 11:18:53 crc kubenswrapper[4838]: I0202 11:18:53.042036 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"c15c4499-e01e-474c-b653-efaf4af4c881","Type":"ContainerStarted","Data":"055ed90fce55d1295e3df2c4f7224b7f2ef165eba0200ab8cb13afa92a4cd6d0"} Feb 02 11:18:53 crc kubenswrapper[4838]: I0202 11:18:53.043161 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Feb 02 11:18:53 crc kubenswrapper[4838]: I0202 11:18:53.085288 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.085272269 podStartE2EDuration="2.085272269s" podCreationTimestamp="2026-02-02 11:18:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:18:53.069159113 +0000 UTC m=+1527.406260151" watchObservedRunningTime="2026-02-02 11:18:53.085272269 +0000 UTC m=+1527.422373307" Feb 02 11:18:53 crc kubenswrapper[4838]: I0202 11:18:53.656638 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-inspector-0"] Feb 02 11:18:54 crc kubenswrapper[4838]: I0202 11:18:54.057251 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-0" event={"ID":"d8ff2cf0-060c-40bd-8fad-021193615a79","Type":"ContainerStarted","Data":"56cfff2e835f4ba8924049cdcc2f61a382bca216bf66ef4aca023467240efe27"} Feb 02 11:18:55 crc kubenswrapper[4838]: I0202 11:18:55.074103 4838 generic.go:334] "Generic (PLEG): container finished" podID="d8ff2cf0-060c-40bd-8fad-021193615a79" containerID="14c8b0ba74ffeb26a1c6df1dd1d24e33c2c4040e495a0938194dcb2c66737f3f" exitCode=0 Feb 02 11:18:55 crc kubenswrapper[4838]: I0202 11:18:55.074661 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-0" event={"ID":"d8ff2cf0-060c-40bd-8fad-021193615a79","Type":"ContainerDied","Data":"14c8b0ba74ffeb26a1c6df1dd1d24e33c2c4040e495a0938194dcb2c66737f3f"} Feb 02 11:18:55 crc kubenswrapper[4838]: I0202 11:18:55.174850 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ironic-inspector-0"] Feb 02 11:18:57 crc kubenswrapper[4838]: I0202 11:18:57.094047 4838 generic.go:334] "Generic (PLEG): container finished" podID="d8ff2cf0-060c-40bd-8fad-021193615a79" containerID="7fdf5e0d13c3b7d3bc7965eb1b34d7a349fc7e7e48cebf704274f8238b29fbb3" exitCode=0 Feb 02 11:18:57 crc kubenswrapper[4838]: I0202 11:18:57.094134 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-0" event={"ID":"d8ff2cf0-060c-40bd-8fad-021193615a79","Type":"ContainerDied","Data":"7fdf5e0d13c3b7d3bc7965eb1b34d7a349fc7e7e48cebf704274f8238b29fbb3"} Feb 02 11:18:57 crc kubenswrapper[4838]: I0202 11:18:57.505152 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-inspector-0" Feb 02 11:18:57 crc kubenswrapper[4838]: I0202 11:18:57.524563 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8ff2cf0-060c-40bd-8fad-021193615a79-scripts\") pod \"d8ff2cf0-060c-40bd-8fad-021193615a79\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " Feb 02 11:18:57 crc kubenswrapper[4838]: I0202 11:18:57.524664 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kmgzp\" (UniqueName: \"kubernetes.io/projected/d8ff2cf0-060c-40bd-8fad-021193615a79-kube-api-access-kmgzp\") pod \"d8ff2cf0-060c-40bd-8fad-021193615a79\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " Feb 02 11:18:57 crc kubenswrapper[4838]: I0202 11:18:57.524718 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-ironic-inspector-dhcp-hostsdir\" (UniqueName: \"kubernetes.io/empty-dir/d8ff2cf0-060c-40bd-8fad-021193615a79-var-lib-ironic-inspector-dhcp-hostsdir\") pod \"d8ff2cf0-060c-40bd-8fad-021193615a79\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " Feb 02 11:18:57 crc kubenswrapper[4838]: I0202 11:18:57.524829 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/d8ff2cf0-060c-40bd-8fad-021193615a79-etc-podinfo\") pod \"d8ff2cf0-060c-40bd-8fad-021193615a79\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " Feb 02 11:18:57 crc kubenswrapper[4838]: I0202 11:18:57.524913 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-ironic\" (UniqueName: \"kubernetes.io/empty-dir/d8ff2cf0-060c-40bd-8fad-021193615a79-var-lib-ironic\") pod \"d8ff2cf0-060c-40bd-8fad-021193615a79\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " Feb 02 11:18:57 crc kubenswrapper[4838]: I0202 11:18:57.524928 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d8ff2cf0-060c-40bd-8fad-021193615a79-config\") pod \"d8ff2cf0-060c-40bd-8fad-021193615a79\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " Feb 02 11:18:57 crc kubenswrapper[4838]: I0202 11:18:57.524974 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8ff2cf0-060c-40bd-8fad-021193615a79-combined-ca-bundle\") pod \"d8ff2cf0-060c-40bd-8fad-021193615a79\" (UID: \"d8ff2cf0-060c-40bd-8fad-021193615a79\") " Feb 02 11:18:57 crc kubenswrapper[4838]: I0202 11:18:57.526046 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8ff2cf0-060c-40bd-8fad-021193615a79-var-lib-ironic-inspector-dhcp-hostsdir" (OuterVolumeSpecName: "var-lib-ironic-inspector-dhcp-hostsdir") pod "d8ff2cf0-060c-40bd-8fad-021193615a79" (UID: "d8ff2cf0-060c-40bd-8fad-021193615a79"). InnerVolumeSpecName "var-lib-ironic-inspector-dhcp-hostsdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:18:57 crc kubenswrapper[4838]: I0202 11:18:57.530225 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8ff2cf0-060c-40bd-8fad-021193615a79-var-lib-ironic" (OuterVolumeSpecName: "var-lib-ironic") pod "d8ff2cf0-060c-40bd-8fad-021193615a79" (UID: "d8ff2cf0-060c-40bd-8fad-021193615a79"). InnerVolumeSpecName "var-lib-ironic". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:18:57 crc kubenswrapper[4838]: I0202 11:18:57.537018 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8ff2cf0-060c-40bd-8fad-021193615a79-scripts" (OuterVolumeSpecName: "scripts") pod "d8ff2cf0-060c-40bd-8fad-021193615a79" (UID: "d8ff2cf0-060c-40bd-8fad-021193615a79"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:18:57 crc kubenswrapper[4838]: I0202 11:18:57.539805 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/d8ff2cf0-060c-40bd-8fad-021193615a79-etc-podinfo" (OuterVolumeSpecName: "etc-podinfo") pod "d8ff2cf0-060c-40bd-8fad-021193615a79" (UID: "d8ff2cf0-060c-40bd-8fad-021193615a79"). InnerVolumeSpecName "etc-podinfo". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Feb 02 11:18:57 crc kubenswrapper[4838]: I0202 11:18:57.540457 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8ff2cf0-060c-40bd-8fad-021193615a79-config" (OuterVolumeSpecName: "config") pod "d8ff2cf0-060c-40bd-8fad-021193615a79" (UID: "d8ff2cf0-060c-40bd-8fad-021193615a79"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:18:57 crc kubenswrapper[4838]: I0202 11:18:57.540701 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8ff2cf0-060c-40bd-8fad-021193615a79-kube-api-access-kmgzp" (OuterVolumeSpecName: "kube-api-access-kmgzp") pod "d8ff2cf0-060c-40bd-8fad-021193615a79" (UID: "d8ff2cf0-060c-40bd-8fad-021193615a79"). InnerVolumeSpecName "kube-api-access-kmgzp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:18:57 crc kubenswrapper[4838]: I0202 11:18:57.584952 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8ff2cf0-060c-40bd-8fad-021193615a79-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d8ff2cf0-060c-40bd-8fad-021193615a79" (UID: "d8ff2cf0-060c-40bd-8fad-021193615a79"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:18:57 crc kubenswrapper[4838]: I0202 11:18:57.626397 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8ff2cf0-060c-40bd-8fad-021193615a79-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:57 crc kubenswrapper[4838]: I0202 11:18:57.626424 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kmgzp\" (UniqueName: \"kubernetes.io/projected/d8ff2cf0-060c-40bd-8fad-021193615a79-kube-api-access-kmgzp\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:57 crc kubenswrapper[4838]: I0202 11:18:57.626436 4838 reconciler_common.go:293] "Volume detached for volume \"var-lib-ironic-inspector-dhcp-hostsdir\" (UniqueName: \"kubernetes.io/empty-dir/d8ff2cf0-060c-40bd-8fad-021193615a79-var-lib-ironic-inspector-dhcp-hostsdir\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:57 crc kubenswrapper[4838]: I0202 11:18:57.626444 4838 reconciler_common.go:293] "Volume detached for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/d8ff2cf0-060c-40bd-8fad-021193615a79-etc-podinfo\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:57 crc kubenswrapper[4838]: I0202 11:18:57.626453 4838 reconciler_common.go:293] "Volume detached for volume \"var-lib-ironic\" (UniqueName: \"kubernetes.io/empty-dir/d8ff2cf0-060c-40bd-8fad-021193615a79-var-lib-ironic\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:57 crc kubenswrapper[4838]: I0202 11:18:57.626461 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/d8ff2cf0-060c-40bd-8fad-021193615a79-config\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:57 crc kubenswrapper[4838]: I0202 11:18:57.626470 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8ff2cf0-060c-40bd-8fad-021193615a79-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.105969 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-0" event={"ID":"d8ff2cf0-060c-40bd-8fad-021193615a79","Type":"ContainerDied","Data":"56cfff2e835f4ba8924049cdcc2f61a382bca216bf66ef4aca023467240efe27"} Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.106039 4838 scope.go:117] "RemoveContainer" containerID="7fdf5e0d13c3b7d3bc7965eb1b34d7a349fc7e7e48cebf704274f8238b29fbb3" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.106046 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.148756 4838 scope.go:117] "RemoveContainer" containerID="14c8b0ba74ffeb26a1c6df1dd1d24e33c2c4040e495a0938194dcb2c66737f3f" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.196284 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ironic-inspector-0"] Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.229394 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ironic-inspector-0"] Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.243432 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ironic-inspector-0"] Feb 02 11:18:58 crc kubenswrapper[4838]: E0202 11:18:58.243945 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8ff2cf0-060c-40bd-8fad-021193615a79" containerName="inspector-pxe-init" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.243973 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8ff2cf0-060c-40bd-8fad-021193615a79" containerName="inspector-pxe-init" Feb 02 11:18:58 crc kubenswrapper[4838]: E0202 11:18:58.244003 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8ff2cf0-060c-40bd-8fad-021193615a79" containerName="ironic-python-agent-init" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.244011 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8ff2cf0-060c-40bd-8fad-021193615a79" containerName="ironic-python-agent-init" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.244191 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8ff2cf0-060c-40bd-8fad-021193615a79" containerName="inspector-pxe-init" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.250139 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.252877 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-inspector-0"] Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.254356 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-inspector-scripts" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.258173 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ironic-inspector-public-svc" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.258372 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ironic-inspector-internal-svc" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.258400 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-inspector-config-data" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.340062 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe57cd8a-a524-426c-a2f4-401cd5642248-scripts\") pod \"ironic-inspector-0\" (UID: \"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.340131 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/fe57cd8a-a524-426c-a2f4-401cd5642248-etc-podinfo\") pod \"ironic-inspector-0\" (UID: \"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.340170 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/fe57cd8a-a524-426c-a2f4-401cd5642248-config\") pod \"ironic-inspector-0\" (UID: \"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.340384 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-ironic-inspector-dhcp-hostsdir\" (UniqueName: \"kubernetes.io/empty-dir/fe57cd8a-a524-426c-a2f4-401cd5642248-var-lib-ironic-inspector-dhcp-hostsdir\") pod \"ironic-inspector-0\" (UID: \"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.340464 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe57cd8a-a524-426c-a2f4-401cd5642248-internal-tls-certs\") pod \"ironic-inspector-0\" (UID: \"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.340846 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe57cd8a-a524-426c-a2f4-401cd5642248-public-tls-certs\") pod \"ironic-inspector-0\" (UID: \"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.340971 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7zhq\" (UniqueName: \"kubernetes.io/projected/fe57cd8a-a524-426c-a2f4-401cd5642248-kube-api-access-n7zhq\") pod 
\"ironic-inspector-0\" (UID: \"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.341081 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-ironic\" (UniqueName: \"kubernetes.io/empty-dir/fe57cd8a-a524-426c-a2f4-401cd5642248-var-lib-ironic\") pod \"ironic-inspector-0\" (UID: \"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.341125 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe57cd8a-a524-426c-a2f4-401cd5642248-combined-ca-bundle\") pod \"ironic-inspector-0\" (UID: \"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.442795 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7zhq\" (UniqueName: \"kubernetes.io/projected/fe57cd8a-a524-426c-a2f4-401cd5642248-kube-api-access-n7zhq\") pod \"ironic-inspector-0\" (UID: \"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.442874 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-ironic\" (UniqueName: \"kubernetes.io/empty-dir/fe57cd8a-a524-426c-a2f4-401cd5642248-var-lib-ironic\") pod \"ironic-inspector-0\" (UID: \"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.442907 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe57cd8a-a524-426c-a2f4-401cd5642248-combined-ca-bundle\") pod \"ironic-inspector-0\" (UID: \"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.442975 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe57cd8a-a524-426c-a2f4-401cd5642248-scripts\") pod \"ironic-inspector-0\" (UID: \"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.443012 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/fe57cd8a-a524-426c-a2f4-401cd5642248-etc-podinfo\") pod \"ironic-inspector-0\" (UID: \"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.443041 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/fe57cd8a-a524-426c-a2f4-401cd5642248-config\") pod \"ironic-inspector-0\" (UID: \"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.443090 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-ironic-inspector-dhcp-hostsdir\" (UniqueName: \"kubernetes.io/empty-dir/fe57cd8a-a524-426c-a2f4-401cd5642248-var-lib-ironic-inspector-dhcp-hostsdir\") pod \"ironic-inspector-0\" (UID: \"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 
11:18:58.443121 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe57cd8a-a524-426c-a2f4-401cd5642248-internal-tls-certs\") pod \"ironic-inspector-0\" (UID: \"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.443148 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe57cd8a-a524-426c-a2f4-401cd5642248-public-tls-certs\") pod \"ironic-inspector-0\" (UID: \"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.443706 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-ironic-inspector-dhcp-hostsdir\" (UniqueName: \"kubernetes.io/empty-dir/fe57cd8a-a524-426c-a2f4-401cd5642248-var-lib-ironic-inspector-dhcp-hostsdir\") pod \"ironic-inspector-0\" (UID: \"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.443822 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-ironic\" (UniqueName: \"kubernetes.io/empty-dir/fe57cd8a-a524-426c-a2f4-401cd5642248-var-lib-ironic\") pod \"ironic-inspector-0\" (UID: \"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.462237 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe57cd8a-a524-426c-a2f4-401cd5642248-combined-ca-bundle\") pod \"ironic-inspector-0\" (UID: \"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.462315 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe57cd8a-a524-426c-a2f4-401cd5642248-public-tls-certs\") pod \"ironic-inspector-0\" (UID: \"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.462940 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe57cd8a-a524-426c-a2f4-401cd5642248-scripts\") pod \"ironic-inspector-0\" (UID: \"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.463109 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe57cd8a-a524-426c-a2f4-401cd5642248-internal-tls-certs\") pod \"ironic-inspector-0\" (UID: \"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.463357 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/fe57cd8a-a524-426c-a2f4-401cd5642248-etc-podinfo\") pod \"ironic-inspector-0\" (UID: \"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.463378 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/fe57cd8a-a524-426c-a2f4-401cd5642248-config\") pod \"ironic-inspector-0\" (UID: 
\"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.465216 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7zhq\" (UniqueName: \"kubernetes.io/projected/fe57cd8a-a524-426c-a2f4-401cd5642248-kube-api-access-n7zhq\") pod \"ironic-inspector-0\" (UID: \"fe57cd8a-a524-426c-a2f4-401cd5642248\") " pod="openstack/ironic-inspector-0" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.517045 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8ff2cf0-060c-40bd-8fad-021193615a79" path="/var/lib/kubelet/pods/d8ff2cf0-060c-40bd-8fad-021193615a79/volumes" Feb 02 11:18:58 crc kubenswrapper[4838]: I0202 11:18:58.572820 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-inspector-0" Feb 02 11:18:59 crc kubenswrapper[4838]: I0202 11:18:59.065711 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-inspector-0"] Feb 02 11:18:59 crc kubenswrapper[4838]: W0202 11:18:59.077267 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfe57cd8a_a524_426c_a2f4_401cd5642248.slice/crio-a397d9b3e11cf866d12cc0ed07776d44413e336ec75a794211fda3fc91160959 WatchSource:0}: Error finding container a397d9b3e11cf866d12cc0ed07776d44413e336ec75a794211fda3fc91160959: Status 404 returned error can't find the container with id a397d9b3e11cf866d12cc0ed07776d44413e336ec75a794211fda3fc91160959 Feb 02 11:18:59 crc kubenswrapper[4838]: I0202 11:18:59.122454 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-0" event={"ID":"fe57cd8a-a524-426c-a2f4-401cd5642248","Type":"ContainerStarted","Data":"a397d9b3e11cf866d12cc0ed07776d44413e336ec75a794211fda3fc91160959"} Feb 02 11:19:00 crc kubenswrapper[4838]: I0202 11:19:00.135838 4838 generic.go:334] "Generic (PLEG): container finished" podID="fe57cd8a-a524-426c-a2f4-401cd5642248" containerID="bbadf20018706898d7a83a22e5d878263abd6c640c5f27dea28fdd15467c4cc6" exitCode=0 Feb 02 11:19:00 crc kubenswrapper[4838]: I0202 11:19:00.135928 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-0" event={"ID":"fe57cd8a-a524-426c-a2f4-401cd5642248","Type":"ContainerDied","Data":"bbadf20018706898d7a83a22e5d878263abd6c640c5f27dea28fdd15467c4cc6"} Feb 02 11:19:01 crc kubenswrapper[4838]: I0202 11:19:01.146683 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-0" event={"ID":"fe57cd8a-a524-426c-a2f4-401cd5642248","Type":"ContainerStarted","Data":"696fb63a5d73b25e9ceb9df9cf863108741784215666f798d73500ee1b724714"} Feb 02 11:19:01 crc kubenswrapper[4838]: I0202 11:19:01.481282 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.010322 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-2d8xd"] Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.035382 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-2d8xd" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.038987 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.039084 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.047462 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-2d8xd"] Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.132166 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f9f491b-916e-4236-96b5-8da53babac04-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-2d8xd\" (UID: \"9f9f491b-916e-4236-96b5-8da53babac04\") " pod="openstack/nova-cell0-cell-mapping-2d8xd" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.132309 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f9f491b-916e-4236-96b5-8da53babac04-scripts\") pod \"nova-cell0-cell-mapping-2d8xd\" (UID: \"9f9f491b-916e-4236-96b5-8da53babac04\") " pod="openstack/nova-cell0-cell-mapping-2d8xd" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.132485 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f9f491b-916e-4236-96b5-8da53babac04-config-data\") pod \"nova-cell0-cell-mapping-2d8xd\" (UID: \"9f9f491b-916e-4236-96b5-8da53babac04\") " pod="openstack/nova-cell0-cell-mapping-2d8xd" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.132522 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wvqq\" (UniqueName: \"kubernetes.io/projected/9f9f491b-916e-4236-96b5-8da53babac04-kube-api-access-2wvqq\") pod \"nova-cell0-cell-mapping-2d8xd\" (UID: \"9f9f491b-916e-4236-96b5-8da53babac04\") " pod="openstack/nova-cell0-cell-mapping-2d8xd" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.178788 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.180992 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.183800 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.193964 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.202423 4838 generic.go:334] "Generic (PLEG): container finished" podID="fe57cd8a-a524-426c-a2f4-401cd5642248" containerID="696fb63a5d73b25e9ceb9df9cf863108741784215666f798d73500ee1b724714" exitCode=0 Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.204714 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-0" event={"ID":"fe57cd8a-a524-426c-a2f4-401cd5642248","Type":"ContainerDied","Data":"696fb63a5d73b25e9ceb9df9cf863108741784215666f798d73500ee1b724714"} Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.204796 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.207498 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.229357 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.236917 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f9f491b-916e-4236-96b5-8da53babac04-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-2d8xd\" (UID: \"9f9f491b-916e-4236-96b5-8da53babac04\") " pod="openstack/nova-cell0-cell-mapping-2d8xd" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.237393 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f97c9e6f-a864-4903-b775-0cef0afed268-config-data\") pod \"nova-scheduler-0\" (UID: \"f97c9e6f-a864-4903-b775-0cef0afed268\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.237557 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f97c9e6f-a864-4903-b775-0cef0afed268-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f97c9e6f-a864-4903-b775-0cef0afed268\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.237792 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06d2d265-0f5f-4b18-b02f-8c83b6c8d547-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"06d2d265-0f5f-4b18-b02f-8c83b6c8d547\") " pod="openstack/nova-api-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.238024 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/06d2d265-0f5f-4b18-b02f-8c83b6c8d547-logs\") pod \"nova-api-0\" (UID: \"06d2d265-0f5f-4b18-b02f-8c83b6c8d547\") " pod="openstack/nova-api-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.238188 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f9f491b-916e-4236-96b5-8da53babac04-scripts\") pod \"nova-cell0-cell-mapping-2d8xd\" (UID: \"9f9f491b-916e-4236-96b5-8da53babac04\") " pod="openstack/nova-cell0-cell-mapping-2d8xd" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.239315 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwr5t\" (UniqueName: \"kubernetes.io/projected/f97c9e6f-a864-4903-b775-0cef0afed268-kube-api-access-qwr5t\") pod \"nova-scheduler-0\" (UID: \"f97c9e6f-a864-4903-b775-0cef0afed268\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.240079 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f9f491b-916e-4236-96b5-8da53babac04-config-data\") pod \"nova-cell0-cell-mapping-2d8xd\" (UID: \"9f9f491b-916e-4236-96b5-8da53babac04\") " pod="openstack/nova-cell0-cell-mapping-2d8xd" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.246261 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-2wvqq\" (UniqueName: \"kubernetes.io/projected/9f9f491b-916e-4236-96b5-8da53babac04-kube-api-access-2wvqq\") pod \"nova-cell0-cell-mapping-2d8xd\" (UID: \"9f9f491b-916e-4236-96b5-8da53babac04\") " pod="openstack/nova-cell0-cell-mapping-2d8xd" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.246572 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06d2d265-0f5f-4b18-b02f-8c83b6c8d547-config-data\") pod \"nova-api-0\" (UID: \"06d2d265-0f5f-4b18-b02f-8c83b6c8d547\") " pod="openstack/nova-api-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.246862 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhwkz\" (UniqueName: \"kubernetes.io/projected/06d2d265-0f5f-4b18-b02f-8c83b6c8d547-kube-api-access-nhwkz\") pod \"nova-api-0\" (UID: \"06d2d265-0f5f-4b18-b02f-8c83b6c8d547\") " pod="openstack/nova-api-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.251989 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f9f491b-916e-4236-96b5-8da53babac04-config-data\") pod \"nova-cell0-cell-mapping-2d8xd\" (UID: \"9f9f491b-916e-4236-96b5-8da53babac04\") " pod="openstack/nova-cell0-cell-mapping-2d8xd" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.252447 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f9f491b-916e-4236-96b5-8da53babac04-scripts\") pod \"nova-cell0-cell-mapping-2d8xd\" (UID: \"9f9f491b-916e-4236-96b5-8da53babac04\") " pod="openstack/nova-cell0-cell-mapping-2d8xd" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.280126 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f9f491b-916e-4236-96b5-8da53babac04-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-2d8xd\" (UID: \"9f9f491b-916e-4236-96b5-8da53babac04\") " pod="openstack/nova-cell0-cell-mapping-2d8xd" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.285037 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.321059 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wvqq\" (UniqueName: \"kubernetes.io/projected/9f9f491b-916e-4236-96b5-8da53babac04-kube-api-access-2wvqq\") pod \"nova-cell0-cell-mapping-2d8xd\" (UID: \"9f9f491b-916e-4236-96b5-8da53babac04\") " pod="openstack/nova-cell0-cell-mapping-2d8xd" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.351809 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f97c9e6f-a864-4903-b775-0cef0afed268-config-data\") pod \"nova-scheduler-0\" (UID: \"f97c9e6f-a864-4903-b775-0cef0afed268\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.351857 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f97c9e6f-a864-4903-b775-0cef0afed268-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f97c9e6f-a864-4903-b775-0cef0afed268\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.351885 4838 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06d2d265-0f5f-4b18-b02f-8c83b6c8d547-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"06d2d265-0f5f-4b18-b02f-8c83b6c8d547\") " pod="openstack/nova-api-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.351913 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/06d2d265-0f5f-4b18-b02f-8c83b6c8d547-logs\") pod \"nova-api-0\" (UID: \"06d2d265-0f5f-4b18-b02f-8c83b6c8d547\") " pod="openstack/nova-api-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.351961 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwr5t\" (UniqueName: \"kubernetes.io/projected/f97c9e6f-a864-4903-b775-0cef0afed268-kube-api-access-qwr5t\") pod \"nova-scheduler-0\" (UID: \"f97c9e6f-a864-4903-b775-0cef0afed268\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.352013 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06d2d265-0f5f-4b18-b02f-8c83b6c8d547-config-data\") pod \"nova-api-0\" (UID: \"06d2d265-0f5f-4b18-b02f-8c83b6c8d547\") " pod="openstack/nova-api-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.352052 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhwkz\" (UniqueName: \"kubernetes.io/projected/06d2d265-0f5f-4b18-b02f-8c83b6c8d547-kube-api-access-nhwkz\") pod \"nova-api-0\" (UID: \"06d2d265-0f5f-4b18-b02f-8c83b6c8d547\") " pod="openstack/nova-api-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.353206 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/06d2d265-0f5f-4b18-b02f-8c83b6c8d547-logs\") pod \"nova-api-0\" (UID: \"06d2d265-0f5f-4b18-b02f-8c83b6c8d547\") " pod="openstack/nova-api-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.371027 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06d2d265-0f5f-4b18-b02f-8c83b6c8d547-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"06d2d265-0f5f-4b18-b02f-8c83b6c8d547\") " pod="openstack/nova-api-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.371356 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f97c9e6f-a864-4903-b775-0cef0afed268-config-data\") pod \"nova-scheduler-0\" (UID: \"f97c9e6f-a864-4903-b775-0cef0afed268\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.371898 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f97c9e6f-a864-4903-b775-0cef0afed268-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f97c9e6f-a864-4903-b775-0cef0afed268\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.372397 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06d2d265-0f5f-4b18-b02f-8c83b6c8d547-config-data\") pod \"nova-api-0\" (UID: \"06d2d265-0f5f-4b18-b02f-8c83b6c8d547\") " pod="openstack/nova-api-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.380665 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-2d8xd" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.404507 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhwkz\" (UniqueName: \"kubernetes.io/projected/06d2d265-0f5f-4b18-b02f-8c83b6c8d547-kube-api-access-nhwkz\") pod \"nova-api-0\" (UID: \"06d2d265-0f5f-4b18-b02f-8c83b6c8d547\") " pod="openstack/nova-api-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.409148 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwr5t\" (UniqueName: \"kubernetes.io/projected/f97c9e6f-a864-4903-b775-0cef0afed268-kube-api-access-qwr5t\") pod \"nova-scheduler-0\" (UID: \"f97c9e6f-a864-4903-b775-0cef0afed268\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.473566 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.476134 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.486675 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.488764 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.489429 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.530284 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.542132 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.543193 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.546796 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.553353 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.567849 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qb7k\" (UniqueName: \"kubernetes.io/projected/b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe-kube-api-access-7qb7k\") pod \"nova-metadata-0\" (UID: \"b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe\") " pod="openstack/nova-metadata-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.567959 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe-logs\") pod \"nova-metadata-0\" (UID: \"b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe\") " pod="openstack/nova-metadata-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.568028 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe-config-data\") pod \"nova-metadata-0\" (UID: \"b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe\") " pod="openstack/nova-metadata-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.568219 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe\") " pod="openstack/nova-metadata-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.568793 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78d445889f-948j8"] Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.573264 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78d445889f-948j8" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.579574 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78d445889f-948j8"] Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.681594 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qb7k\" (UniqueName: \"kubernetes.io/projected/b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe-kube-api-access-7qb7k\") pod \"nova-metadata-0\" (UID: \"b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe\") " pod="openstack/nova-metadata-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.681676 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe-logs\") pod \"nova-metadata-0\" (UID: \"b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe\") " pod="openstack/nova-metadata-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.681710 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97a5f67e-673d-47c3-826d-75f217906282-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"97a5f67e-673d-47c3-826d-75f217906282\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.681730 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-ovsdbserver-sb\") pod \"dnsmasq-dns-78d445889f-948j8\" (UID: \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\") " pod="openstack/dnsmasq-dns-78d445889f-948j8" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.681754 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdg8b\" (UniqueName: \"kubernetes.io/projected/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-kube-api-access-fdg8b\") pod \"dnsmasq-dns-78d445889f-948j8\" (UID: \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\") " pod="openstack/dnsmasq-dns-78d445889f-948j8" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.681772 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe-config-data\") pod \"nova-metadata-0\" (UID: \"b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe\") " pod="openstack/nova-metadata-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.681788 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-dns-svc\") pod \"dnsmasq-dns-78d445889f-948j8\" (UID: \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\") " pod="openstack/dnsmasq-dns-78d445889f-948j8" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.681834 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-config\") pod \"dnsmasq-dns-78d445889f-948j8\" (UID: \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\") " pod="openstack/dnsmasq-dns-78d445889f-948j8" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.681882 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-dns-swift-storage-0\") pod \"dnsmasq-dns-78d445889f-948j8\" (UID: \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\") " pod="openstack/dnsmasq-dns-78d445889f-948j8" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.681910 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe\") " pod="openstack/nova-metadata-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.681956 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-ovsdbserver-nb\") pod \"dnsmasq-dns-78d445889f-948j8\" (UID: \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\") " pod="openstack/dnsmasq-dns-78d445889f-948j8" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.681981 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97a5f67e-673d-47c3-826d-75f217906282-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"97a5f67e-673d-47c3-826d-75f217906282\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.682014 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfrj7\" (UniqueName: \"kubernetes.io/projected/97a5f67e-673d-47c3-826d-75f217906282-kube-api-access-nfrj7\") pod \"nova-cell1-novncproxy-0\" (UID: \"97a5f67e-673d-47c3-826d-75f217906282\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.682748 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe-logs\") pod \"nova-metadata-0\" (UID: \"b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe\") " pod="openstack/nova-metadata-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.695324 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe\") " pod="openstack/nova-metadata-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.700376 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe-config-data\") pod \"nova-metadata-0\" (UID: \"b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe\") " pod="openstack/nova-metadata-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.722687 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qb7k\" (UniqueName: \"kubernetes.io/projected/b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe-kube-api-access-7qb7k\") pod \"nova-metadata-0\" (UID: \"b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe\") " pod="openstack/nova-metadata-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.792721 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-ovsdbserver-nb\") pod \"dnsmasq-dns-78d445889f-948j8\" (UID: 
\"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\") " pod="openstack/dnsmasq-dns-78d445889f-948j8" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.792777 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97a5f67e-673d-47c3-826d-75f217906282-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"97a5f67e-673d-47c3-826d-75f217906282\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.792814 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfrj7\" (UniqueName: \"kubernetes.io/projected/97a5f67e-673d-47c3-826d-75f217906282-kube-api-access-nfrj7\") pod \"nova-cell1-novncproxy-0\" (UID: \"97a5f67e-673d-47c3-826d-75f217906282\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.792859 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97a5f67e-673d-47c3-826d-75f217906282-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"97a5f67e-673d-47c3-826d-75f217906282\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.792879 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-ovsdbserver-sb\") pod \"dnsmasq-dns-78d445889f-948j8\" (UID: \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\") " pod="openstack/dnsmasq-dns-78d445889f-948j8" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.792899 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-dns-svc\") pod \"dnsmasq-dns-78d445889f-948j8\" (UID: \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\") " pod="openstack/dnsmasq-dns-78d445889f-948j8" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.792917 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdg8b\" (UniqueName: \"kubernetes.io/projected/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-kube-api-access-fdg8b\") pod \"dnsmasq-dns-78d445889f-948j8\" (UID: \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\") " pod="openstack/dnsmasq-dns-78d445889f-948j8" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.792959 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-config\") pod \"dnsmasq-dns-78d445889f-948j8\" (UID: \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\") " pod="openstack/dnsmasq-dns-78d445889f-948j8" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.792987 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-dns-swift-storage-0\") pod \"dnsmasq-dns-78d445889f-948j8\" (UID: \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\") " pod="openstack/dnsmasq-dns-78d445889f-948j8" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.793992 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-dns-swift-storage-0\") pod \"dnsmasq-dns-78d445889f-948j8\" (UID: \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\") " 
pod="openstack/dnsmasq-dns-78d445889f-948j8" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.794559 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-ovsdbserver-nb\") pod \"dnsmasq-dns-78d445889f-948j8\" (UID: \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\") " pod="openstack/dnsmasq-dns-78d445889f-948j8" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.796086 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-ovsdbserver-sb\") pod \"dnsmasq-dns-78d445889f-948j8\" (UID: \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\") " pod="openstack/dnsmasq-dns-78d445889f-948j8" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.796689 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-dns-svc\") pod \"dnsmasq-dns-78d445889f-948j8\" (UID: \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\") " pod="openstack/dnsmasq-dns-78d445889f-948j8" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.797746 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-config\") pod \"dnsmasq-dns-78d445889f-948j8\" (UID: \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\") " pod="openstack/dnsmasq-dns-78d445889f-948j8" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.805279 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97a5f67e-673d-47c3-826d-75f217906282-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"97a5f67e-673d-47c3-826d-75f217906282\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.816868 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.821536 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfrj7\" (UniqueName: \"kubernetes.io/projected/97a5f67e-673d-47c3-826d-75f217906282-kube-api-access-nfrj7\") pod \"nova-cell1-novncproxy-0\" (UID: \"97a5f67e-673d-47c3-826d-75f217906282\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.822220 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97a5f67e-673d-47c3-826d-75f217906282-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"97a5f67e-673d-47c3-826d-75f217906282\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.829469 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdg8b\" (UniqueName: \"kubernetes.io/projected/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-kube-api-access-fdg8b\") pod \"dnsmasq-dns-78d445889f-948j8\" (UID: \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\") " pod="openstack/dnsmasq-dns-78d445889f-948j8" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.866748 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 02 11:19:02 crc kubenswrapper[4838]: I0202 11:19:02.905065 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78d445889f-948j8" Feb 02 11:19:03 crc kubenswrapper[4838]: I0202 11:19:03.336763 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 11:19:03 crc kubenswrapper[4838]: I0202 11:19:03.353674 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-2d8xd"] Feb 02 11:19:03 crc kubenswrapper[4838]: W0202 11:19:03.610227 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod232b1b0a_b35d_4834_a4bb_3a1ed7de6f71.slice/crio-adddfa0a3b712e2ee614e717c4aa9923456562be9403be804d30246b1c61579f WatchSource:0}: Error finding container adddfa0a3b712e2ee614e717c4aa9923456562be9403be804d30246b1c61579f: Status 404 returned error can't find the container with id adddfa0a3b712e2ee614e717c4aa9923456562be9403be804d30246b1c61579f Feb 02 11:19:03 crc kubenswrapper[4838]: I0202 11:19:03.619143 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78d445889f-948j8"] Feb 02 11:19:03 crc kubenswrapper[4838]: W0202 11:19:03.633558 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf97c9e6f_a864_4903_b775_0cef0afed268.slice/crio-9ea1b9d4ee7edbd1db5e014b145cc700a409a72d3e718899e8b31f0676d868d7 WatchSource:0}: Error finding container 9ea1b9d4ee7edbd1db5e014b145cc700a409a72d3e718899e8b31f0676d868d7: Status 404 returned error can't find the container with id 9ea1b9d4ee7edbd1db5e014b145cc700a409a72d3e718899e8b31f0676d868d7 Feb 02 11:19:03 crc kubenswrapper[4838]: I0202 11:19:03.638146 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 11:19:04 crc kubenswrapper[4838]: I0202 11:19:04.015257 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 11:19:04 crc kubenswrapper[4838]: W0202 11:19:04.019440 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb8bb7f20_b07d_49a4_b9d0_7e1cc5f8f6fe.slice/crio-ad8f2af39fe2261fc99a91129bab044992d92866641413380415e20a066898a1 WatchSource:0}: Error finding container ad8f2af39fe2261fc99a91129bab044992d92866641413380415e20a066898a1: Status 404 returned error can't find the container with id ad8f2af39fe2261fc99a91129bab044992d92866641413380415e20a066898a1 Feb 02 11:19:04 crc kubenswrapper[4838]: I0202 11:19:04.189968 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 11:19:04 crc kubenswrapper[4838]: W0202 11:19:04.191949 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod97a5f67e_673d_47c3_826d_75f217906282.slice/crio-288dc7f2953eaaa631b4d98e29bfdaaf6987c69f5cbf155c4f708d0a9147db2d WatchSource:0}: Error finding container 288dc7f2953eaaa631b4d98e29bfdaaf6987c69f5cbf155c4f708d0a9147db2d: Status 404 returned error can't find the container with id 288dc7f2953eaaa631b4d98e29bfdaaf6987c69f5cbf155c4f708d0a9147db2d Feb 02 11:19:04 crc kubenswrapper[4838]: I0202 11:19:04.230586 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"06d2d265-0f5f-4b18-b02f-8c83b6c8d547","Type":"ContainerStarted","Data":"ead273bbad5a08da7b91d8fa3e91c482b2e6b3afa36ceda81af88d09f8948be5"} Feb 02 11:19:04 crc kubenswrapper[4838]: I0202 11:19:04.235555 4838 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-2d8xd" event={"ID":"9f9f491b-916e-4236-96b5-8da53babac04","Type":"ContainerStarted","Data":"1c67450f6a87b6b56d33c9406dd6782b603d142421c0269b7dcd1f199fb6d160"} Feb 02 11:19:04 crc kubenswrapper[4838]: I0202 11:19:04.237467 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f97c9e6f-a864-4903-b775-0cef0afed268","Type":"ContainerStarted","Data":"9ea1b9d4ee7edbd1db5e014b145cc700a409a72d3e718899e8b31f0676d868d7"} Feb 02 11:19:04 crc kubenswrapper[4838]: I0202 11:19:04.239744 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe","Type":"ContainerStarted","Data":"ad8f2af39fe2261fc99a91129bab044992d92866641413380415e20a066898a1"} Feb 02 11:19:04 crc kubenswrapper[4838]: I0202 11:19:04.241348 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78d445889f-948j8" event={"ID":"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71","Type":"ContainerStarted","Data":"adddfa0a3b712e2ee614e717c4aa9923456562be9403be804d30246b1c61579f"} Feb 02 11:19:04 crc kubenswrapper[4838]: I0202 11:19:04.244591 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"97a5f67e-673d-47c3-826d-75f217906282","Type":"ContainerStarted","Data":"288dc7f2953eaaa631b4d98e29bfdaaf6987c69f5cbf155c4f708d0a9147db2d"} Feb 02 11:19:04 crc kubenswrapper[4838]: I0202 11:19:04.857433 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-5tpqb"] Feb 02 11:19:04 crc kubenswrapper[4838]: I0202 11:19:04.858883 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-5tpqb" Feb 02 11:19:04 crc kubenswrapper[4838]: I0202 11:19:04.866284 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-5tpqb"] Feb 02 11:19:04 crc kubenswrapper[4838]: I0202 11:19:04.866502 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Feb 02 11:19:04 crc kubenswrapper[4838]: I0202 11:19:04.866528 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Feb 02 11:19:04 crc kubenswrapper[4838]: I0202 11:19:04.961740 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ce26605-8dfc-48cd-a362-1a37c67ea300-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-5tpqb\" (UID: \"7ce26605-8dfc-48cd-a362-1a37c67ea300\") " pod="openstack/nova-cell1-conductor-db-sync-5tpqb" Feb 02 11:19:04 crc kubenswrapper[4838]: I0202 11:19:04.961790 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ce26605-8dfc-48cd-a362-1a37c67ea300-scripts\") pod \"nova-cell1-conductor-db-sync-5tpqb\" (UID: \"7ce26605-8dfc-48cd-a362-1a37c67ea300\") " pod="openstack/nova-cell1-conductor-db-sync-5tpqb" Feb 02 11:19:04 crc kubenswrapper[4838]: I0202 11:19:04.961933 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ce26605-8dfc-48cd-a362-1a37c67ea300-config-data\") pod \"nova-cell1-conductor-db-sync-5tpqb\" (UID: \"7ce26605-8dfc-48cd-a362-1a37c67ea300\") " 
pod="openstack/nova-cell1-conductor-db-sync-5tpqb" Feb 02 11:19:04 crc kubenswrapper[4838]: I0202 11:19:04.961962 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bl4n2\" (UniqueName: \"kubernetes.io/projected/7ce26605-8dfc-48cd-a362-1a37c67ea300-kube-api-access-bl4n2\") pod \"nova-cell1-conductor-db-sync-5tpqb\" (UID: \"7ce26605-8dfc-48cd-a362-1a37c67ea300\") " pod="openstack/nova-cell1-conductor-db-sync-5tpqb" Feb 02 11:19:05 crc kubenswrapper[4838]: I0202 11:19:05.064095 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ce26605-8dfc-48cd-a362-1a37c67ea300-config-data\") pod \"nova-cell1-conductor-db-sync-5tpqb\" (UID: \"7ce26605-8dfc-48cd-a362-1a37c67ea300\") " pod="openstack/nova-cell1-conductor-db-sync-5tpqb" Feb 02 11:19:05 crc kubenswrapper[4838]: I0202 11:19:05.064155 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bl4n2\" (UniqueName: \"kubernetes.io/projected/7ce26605-8dfc-48cd-a362-1a37c67ea300-kube-api-access-bl4n2\") pod \"nova-cell1-conductor-db-sync-5tpqb\" (UID: \"7ce26605-8dfc-48cd-a362-1a37c67ea300\") " pod="openstack/nova-cell1-conductor-db-sync-5tpqb" Feb 02 11:19:05 crc kubenswrapper[4838]: I0202 11:19:05.064184 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ce26605-8dfc-48cd-a362-1a37c67ea300-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-5tpqb\" (UID: \"7ce26605-8dfc-48cd-a362-1a37c67ea300\") " pod="openstack/nova-cell1-conductor-db-sync-5tpqb" Feb 02 11:19:05 crc kubenswrapper[4838]: I0202 11:19:05.064202 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ce26605-8dfc-48cd-a362-1a37c67ea300-scripts\") pod \"nova-cell1-conductor-db-sync-5tpqb\" (UID: \"7ce26605-8dfc-48cd-a362-1a37c67ea300\") " pod="openstack/nova-cell1-conductor-db-sync-5tpqb" Feb 02 11:19:05 crc kubenswrapper[4838]: I0202 11:19:05.067965 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ce26605-8dfc-48cd-a362-1a37c67ea300-scripts\") pod \"nova-cell1-conductor-db-sync-5tpqb\" (UID: \"7ce26605-8dfc-48cd-a362-1a37c67ea300\") " pod="openstack/nova-cell1-conductor-db-sync-5tpqb" Feb 02 11:19:05 crc kubenswrapper[4838]: I0202 11:19:05.077884 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ce26605-8dfc-48cd-a362-1a37c67ea300-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-5tpqb\" (UID: \"7ce26605-8dfc-48cd-a362-1a37c67ea300\") " pod="openstack/nova-cell1-conductor-db-sync-5tpqb" Feb 02 11:19:05 crc kubenswrapper[4838]: I0202 11:19:05.080841 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ce26605-8dfc-48cd-a362-1a37c67ea300-config-data\") pod \"nova-cell1-conductor-db-sync-5tpqb\" (UID: \"7ce26605-8dfc-48cd-a362-1a37c67ea300\") " pod="openstack/nova-cell1-conductor-db-sync-5tpqb" Feb 02 11:19:05 crc kubenswrapper[4838]: I0202 11:19:05.083693 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bl4n2\" (UniqueName: \"kubernetes.io/projected/7ce26605-8dfc-48cd-a362-1a37c67ea300-kube-api-access-bl4n2\") pod \"nova-cell1-conductor-db-sync-5tpqb\" (UID: 
\"7ce26605-8dfc-48cd-a362-1a37c67ea300\") " pod="openstack/nova-cell1-conductor-db-sync-5tpqb" Feb 02 11:19:05 crc kubenswrapper[4838]: I0202 11:19:05.228919 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-5tpqb" Feb 02 11:19:05 crc kubenswrapper[4838]: I0202 11:19:05.257539 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-2d8xd" event={"ID":"9f9f491b-916e-4236-96b5-8da53babac04","Type":"ContainerStarted","Data":"43c015336232612c23c346095e9d080a828e7b82c35111545a72241601c2a52f"} Feb 02 11:19:05 crc kubenswrapper[4838]: I0202 11:19:05.266605 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-0" event={"ID":"fe57cd8a-a524-426c-a2f4-401cd5642248","Type":"ContainerStarted","Data":"a0f9f49130eab8c19d96d9f91b9d18bdf60a52f4d160f8eb99054acdf82d8375"} Feb 02 11:19:05 crc kubenswrapper[4838]: I0202 11:19:05.273633 4838 generic.go:334] "Generic (PLEG): container finished" podID="232b1b0a-b35d-4834-a4bb-3a1ed7de6f71" containerID="dee22b825ffd2fda3a4ac6f30ab7dd95a42ada67172e2d8a5ff74021eaa4adaf" exitCode=0 Feb 02 11:19:05 crc kubenswrapper[4838]: I0202 11:19:05.273676 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78d445889f-948j8" event={"ID":"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71","Type":"ContainerDied","Data":"dee22b825ffd2fda3a4ac6f30ab7dd95a42ada67172e2d8a5ff74021eaa4adaf"} Feb 02 11:19:05 crc kubenswrapper[4838]: I0202 11:19:05.282400 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-2d8xd" podStartSLOduration=4.282376656 podStartE2EDuration="4.282376656s" podCreationTimestamp="2026-02-02 11:19:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:19:05.274925669 +0000 UTC m=+1539.612026697" watchObservedRunningTime="2026-02-02 11:19:05.282376656 +0000 UTC m=+1539.619477684" Feb 02 11:19:05 crc kubenswrapper[4838]: I0202 11:19:05.726474 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-5tpqb"] Feb 02 11:19:05 crc kubenswrapper[4838]: W0202 11:19:05.745879 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ce26605_8dfc_48cd_a362_1a37c67ea300.slice/crio-356dfba6041eaa50822378c9d084733e611dc746e56790bfaab83dd3c22ee6da WatchSource:0}: Error finding container 356dfba6041eaa50822378c9d084733e611dc746e56790bfaab83dd3c22ee6da: Status 404 returned error can't find the container with id 356dfba6041eaa50822378c9d084733e611dc746e56790bfaab83dd3c22ee6da Feb 02 11:19:05 crc kubenswrapper[4838]: I0202 11:19:05.833108 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 11:19:05 crc kubenswrapper[4838]: I0202 11:19:05.844711 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 11:19:06 crc kubenswrapper[4838]: I0202 11:19:06.292549 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-5tpqb" event={"ID":"7ce26605-8dfc-48cd-a362-1a37c67ea300","Type":"ContainerStarted","Data":"356dfba6041eaa50822378c9d084733e611dc746e56790bfaab83dd3c22ee6da"} Feb 02 11:19:06 crc kubenswrapper[4838]: I0202 11:19:06.316865 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" 
Feb 02 11:19:07 crc kubenswrapper[4838]: I0202 11:19:07.305101 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78d445889f-948j8" event={"ID":"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71","Type":"ContainerStarted","Data":"010eaa33f02c4787fa4116a23dc7014dbc08122d9a29861bd4c02497dc2f3fa3"} Feb 02 11:19:07 crc kubenswrapper[4838]: I0202 11:19:07.306074 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-78d445889f-948j8" Feb 02 11:19:07 crc kubenswrapper[4838]: I0202 11:19:07.310871 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-5tpqb" event={"ID":"7ce26605-8dfc-48cd-a362-1a37c67ea300","Type":"ContainerStarted","Data":"4823bcd344d625ad561c7b904f665bccc6207c62fe0a225cf8d050bc4a5c40fd"} Feb 02 11:19:07 crc kubenswrapper[4838]: I0202 11:19:07.314544 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-0" event={"ID":"fe57cd8a-a524-426c-a2f4-401cd5642248","Type":"ContainerStarted","Data":"dfbfe4ad33d08b1ddfeaae410707cd02fc2b94f8f2e18b4724d1ac8d5325ac68"} Feb 02 11:19:07 crc kubenswrapper[4838]: I0202 11:19:07.336239 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-78d445889f-948j8" podStartSLOduration=5.33558661 podStartE2EDuration="5.33558661s" podCreationTimestamp="2026-02-02 11:19:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:19:07.326178371 +0000 UTC m=+1541.663279399" watchObservedRunningTime="2026-02-02 11:19:07.33558661 +0000 UTC m=+1541.672687648" Feb 02 11:19:07 crc kubenswrapper[4838]: I0202 11:19:07.363060 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-5tpqb" podStartSLOduration=3.363041016 podStartE2EDuration="3.363041016s" podCreationTimestamp="2026-02-02 11:19:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:19:07.347029983 +0000 UTC m=+1541.684131031" watchObservedRunningTime="2026-02-02 11:19:07.363041016 +0000 UTC m=+1541.700142064" Feb 02 11:19:12 crc kubenswrapper[4838]: I0202 11:19:12.907045 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-78d445889f-948j8" Feb 02 11:19:12 crc kubenswrapper[4838]: I0202 11:19:12.981440 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-795f4db4bc-ls245"] Feb 02 11:19:12 crc kubenswrapper[4838]: I0202 11:19:12.981910 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-795f4db4bc-ls245" podUID="d60f2880-0bad-4592-a199-c24539da55ab" containerName="dnsmasq-dns" containerID="cri-o://7c3623f669ca887ea25d2b5f033920c745920685ac72a10ae4f491a1d915a005" gracePeriod=10 Feb 02 11:19:13 crc kubenswrapper[4838]: I0202 11:19:13.545796 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cinder-scheduler-0" podUID="2c000131-c578-473f-8758-95ae23e12d3a" containerName="cinder-scheduler" probeResult="failure" output="Get \"http://10.217.0.182:8080/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 02 11:19:14 crc kubenswrapper[4838]: I0202 11:19:14.387286 4838 generic.go:334] "Generic (PLEG): container finished" podID="d60f2880-0bad-4592-a199-c24539da55ab" 
containerID="7c3623f669ca887ea25d2b5f033920c745920685ac72a10ae4f491a1d915a005" exitCode=0 Feb 02 11:19:14 crc kubenswrapper[4838]: I0202 11:19:14.387376 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-795f4db4bc-ls245" event={"ID":"d60f2880-0bad-4592-a199-c24539da55ab","Type":"ContainerDied","Data":"7c3623f669ca887ea25d2b5f033920c745920685ac72a10ae4f491a1d915a005"} Feb 02 11:19:14 crc kubenswrapper[4838]: I0202 11:19:14.403999 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-0" event={"ID":"fe57cd8a-a524-426c-a2f4-401cd5642248","Type":"ContainerStarted","Data":"0bf2010f9679b477c73028054895324456adf4dc4d6e3c3615f92504bec5e6e9"} Feb 02 11:19:14 crc kubenswrapper[4838]: E0202 11:19:14.856673 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-nova-api:current-podified" Feb 02 11:19:14 crc kubenswrapper[4838]: E0202 11:19:14.857123 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:nova-api-log,Image:quay.io/podified-antelope-centos9/openstack-nova-api:current-podified,Command:[/usr/bin/dumb-init],Args:[--single-child -- /bin/sh -c /usr/bin/tail -n+1 -F /var/log/nova/nova-api.log 2>/dev/null],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n666h547h5c8hf7h5b5h6chffh5cch5dh559h58fh75h67h65dh665h5d7h6dhbdh5d6hbfh8h565h6dh55ch86h5c9h666h586h677h9bh574h56fq,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/nova,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nhwkz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 8774 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 8774 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42436,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 8774 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:6,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
nova-api-0_openstack(06d2d265-0f5f-4b18-b02f-8c83b6c8d547): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 11:19:14 crc kubenswrapper[4838]: E0202 11:19:14.859488 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"nova-api-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"nova-api-api\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-nova-api:current-podified\\\"\"]" pod="openstack/nova-api-0" podUID="06d2d265-0f5f-4b18-b02f-8c83b6c8d547" Feb 02 11:19:15 crc kubenswrapper[4838]: I0202 11:19:15.418840 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-0" event={"ID":"fe57cd8a-a524-426c-a2f4-401cd5642248","Type":"ContainerStarted","Data":"a934d415caf5637ad8968b1208d13965a5e45c6d011856ec5858b83f47493434"} Feb 02 11:19:15 crc kubenswrapper[4838]: I0202 11:19:15.419746 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ironic-inspector-0" Feb 02 11:19:15 crc kubenswrapper[4838]: E0202 11:19:15.422084 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"nova-api-log\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-nova-api:current-podified\\\"\", failed to \"StartContainer\" for \"nova-api-api\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-nova-api:current-podified\\\"\"]" pod="openstack/nova-api-0" podUID="06d2d265-0f5f-4b18-b02f-8c83b6c8d547" Feb 02 11:19:15 crc kubenswrapper[4838]: I0202 11:19:15.456043 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ironic-inspector-0" podStartSLOduration=17.456018755 podStartE2EDuration="17.456018755s" podCreationTimestamp="2026-02-02 11:18:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:19:15.445640611 +0000 UTC m=+1549.782741669" watchObservedRunningTime="2026-02-02 11:19:15.456018755 +0000 UTC m=+1549.793119783" Feb 02 11:19:17 crc kubenswrapper[4838]: I0202 11:19:17.025886 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-795f4db4bc-ls245" Feb 02 11:19:17 crc kubenswrapper[4838]: I0202 11:19:17.131815 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tgj6g\" (UniqueName: \"kubernetes.io/projected/d60f2880-0bad-4592-a199-c24539da55ab-kube-api-access-tgj6g\") pod \"d60f2880-0bad-4592-a199-c24539da55ab\" (UID: \"d60f2880-0bad-4592-a199-c24539da55ab\") " Feb 02 11:19:17 crc kubenswrapper[4838]: I0202 11:19:17.132451 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-ovsdbserver-nb\") pod \"d60f2880-0bad-4592-a199-c24539da55ab\" (UID: \"d60f2880-0bad-4592-a199-c24539da55ab\") " Feb 02 11:19:17 crc kubenswrapper[4838]: I0202 11:19:17.132644 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-dns-svc\") pod \"d60f2880-0bad-4592-a199-c24539da55ab\" (UID: \"d60f2880-0bad-4592-a199-c24539da55ab\") " Feb 02 11:19:17 crc kubenswrapper[4838]: I0202 11:19:17.132775 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-config\") pod \"d60f2880-0bad-4592-a199-c24539da55ab\" (UID: \"d60f2880-0bad-4592-a199-c24539da55ab\") " Feb 02 11:19:17 crc kubenswrapper[4838]: I0202 11:19:17.132935 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-dns-swift-storage-0\") pod \"d60f2880-0bad-4592-a199-c24539da55ab\" (UID: \"d60f2880-0bad-4592-a199-c24539da55ab\") " Feb 02 11:19:17 crc kubenswrapper[4838]: I0202 11:19:17.133083 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-ovsdbserver-sb\") pod \"d60f2880-0bad-4592-a199-c24539da55ab\" (UID: \"d60f2880-0bad-4592-a199-c24539da55ab\") " Feb 02 11:19:17 crc kubenswrapper[4838]: I0202 11:19:17.141940 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d60f2880-0bad-4592-a199-c24539da55ab-kube-api-access-tgj6g" (OuterVolumeSpecName: "kube-api-access-tgj6g") pod "d60f2880-0bad-4592-a199-c24539da55ab" (UID: "d60f2880-0bad-4592-a199-c24539da55ab"). InnerVolumeSpecName "kube-api-access-tgj6g". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:19:17 crc kubenswrapper[4838]: I0202 11:19:17.188987 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d60f2880-0bad-4592-a199-c24539da55ab" (UID: "d60f2880-0bad-4592-a199-c24539da55ab"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:19:17 crc kubenswrapper[4838]: I0202 11:19:17.191177 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-config" (OuterVolumeSpecName: "config") pod "d60f2880-0bad-4592-a199-c24539da55ab" (UID: "d60f2880-0bad-4592-a199-c24539da55ab"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:19:17 crc kubenswrapper[4838]: I0202 11:19:17.195725 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d60f2880-0bad-4592-a199-c24539da55ab" (UID: "d60f2880-0bad-4592-a199-c24539da55ab"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:19:17 crc kubenswrapper[4838]: I0202 11:19:17.196851 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d60f2880-0bad-4592-a199-c24539da55ab" (UID: "d60f2880-0bad-4592-a199-c24539da55ab"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:19:17 crc kubenswrapper[4838]: I0202 11:19:17.236321 4838 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:17 crc kubenswrapper[4838]: I0202 11:19:17.236411 4838 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:17 crc kubenswrapper[4838]: I0202 11:19:17.236445 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-config\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:17 crc kubenswrapper[4838]: I0202 11:19:17.236455 4838 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:17 crc kubenswrapper[4838]: I0202 11:19:17.236465 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tgj6g\" (UniqueName: \"kubernetes.io/projected/d60f2880-0bad-4592-a199-c24539da55ab-kube-api-access-tgj6g\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:17 crc kubenswrapper[4838]: I0202 11:19:17.264190 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d60f2880-0bad-4592-a199-c24539da55ab" (UID: "d60f2880-0bad-4592-a199-c24539da55ab"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:19:17 crc kubenswrapper[4838]: I0202 11:19:17.338154 4838 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d60f2880-0bad-4592-a199-c24539da55ab-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:17 crc kubenswrapper[4838]: I0202 11:19:17.445233 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-795f4db4bc-ls245" event={"ID":"d60f2880-0bad-4592-a199-c24539da55ab","Type":"ContainerDied","Data":"fb24c97a4f933f0f7e84b2947ab604361d3918c223ba7a4fdab52ed9dd2189ca"} Feb 02 11:19:17 crc kubenswrapper[4838]: I0202 11:19:17.445590 4838 scope.go:117] "RemoveContainer" containerID="7c3623f669ca887ea25d2b5f033920c745920685ac72a10ae4f491a1d915a005" Feb 02 11:19:17 crc kubenswrapper[4838]: I0202 11:19:17.445925 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-795f4db4bc-ls245" Feb 02 11:19:17 crc kubenswrapper[4838]: I0202 11:19:17.487873 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-795f4db4bc-ls245"] Feb 02 11:19:17 crc kubenswrapper[4838]: I0202 11:19:17.497569 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-795f4db4bc-ls245"] Feb 02 11:19:18 crc kubenswrapper[4838]: E0202 11:19:18.029570 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-nova-api:current-podified" Feb 02 11:19:18 crc kubenswrapper[4838]: E0202 11:19:18.030575 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:nova-metadata-log,Image:quay.io/podified-antelope-centos9/openstack-nova-api:current-podified,Command:[/usr/bin/dumb-init],Args:[--single-child -- /bin/sh -c /usr/bin/tail -n+1 -F /var/log/nova/nova-metadata.log 2>/dev/null],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68hf6h5b4h54ch546h688h678hdfhfbhc9h659h586hdh7h66chdh8fh5cbhbch7fh545h565h5bh66fh578h67hf9h698h9fh66h669h5cfq,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/nova,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7qb7k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 8775 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:10,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 8775 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42436,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 8775 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:6,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-metadata-0_openstack(b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 11:19:18 crc kubenswrapper[4838]: E0202 11:19:18.035206 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"nova-metadata-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"nova-metadata-metadata\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-nova-api:current-podified\\\"\"]" pod="openstack/nova-metadata-0" podUID="b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe" Feb 02 11:19:18 crc kubenswrapper[4838]: I0202 11:19:18.516668 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d60f2880-0bad-4592-a199-c24539da55ab" path="/var/lib/kubelet/pods/d60f2880-0bad-4592-a199-c24539da55ab/volumes" Feb 02 11:19:18 crc kubenswrapper[4838]: I0202 11:19:18.573987 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ironic-inspector-0" Feb 02 11:19:18 crc kubenswrapper[4838]: I0202 11:19:18.574044 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ironic-inspector-0" Feb 02 11:19:18 crc kubenswrapper[4838]: I0202 11:19:18.574064 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ironic-inspector-0" Feb 02 11:19:18 crc kubenswrapper[4838]: I0202 11:19:18.574074 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ironic-inspector-0" Feb 02 11:19:18 crc kubenswrapper[4838]: I0202 11:19:18.583025 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ironic-inspector-0" Feb 02 11:19:18 crc kubenswrapper[4838]: I0202 11:19:18.589870 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cinder-scheduler-0" podUID="2c000131-c578-473f-8758-95ae23e12d3a" containerName="cinder-scheduler" probeResult="failure" output="Get \"http://10.217.0.182:8080/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 02 11:19:18 crc kubenswrapper[4838]: I0202 11:19:18.622320 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ironic-inspector-0" Feb 02 11:19:18 crc kubenswrapper[4838]: I0202 11:19:18.623388 4838 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openstack/ironic-inspector-0" Feb 02 11:19:19 crc kubenswrapper[4838]: E0202 11:19:19.003356 4838 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-nova-scheduler:current-podified" Feb 02 11:19:19 crc kubenswrapper[4838]: E0202 11:19:19.003530 4838 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:nova-scheduler-scheduler,Image:quay.io/podified-antelope-centos9/openstack-nova-scheduler:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n588h5dfhc4h5f9h75h5ddh59bh574h575hbdh68dh5f8h664h8fh8dh94h64bh7dh5f7h5f8h5cbh57fhch648hd4hfh95h55dh557hd9h5c4h55dq,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/kolla/config_files/config.json,SubPath:nova-scheduler-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qwr5t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pgrep -r DRST nova-scheduler],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:10,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pgrep -r DRST nova-scheduler],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42436,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pgrep -r DRST nova-scheduler],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:6,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-scheduler-0_openstack(f97c9e6f-a864-4903-b775-0cef0afed268): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 11:19:19 crc kubenswrapper[4838]: E0202 11:19:19.004691 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"nova-scheduler-scheduler\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/nova-scheduler-0" podUID="f97c9e6f-a864-4903-b775-0cef0afed268" Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.304110 4838 scope.go:117] "RemoveContainer" containerID="96a3b1a7488871cbb08e673c4e79ceb4d709eb0991d160fa636514d7613c68ca" Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.392502 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.470787 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.470770 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe","Type":"ContainerDied","Data":"ad8f2af39fe2261fc99a91129bab044992d92866641413380415e20a066898a1"} Feb 02 11:19:19 crc kubenswrapper[4838]: E0202 11:19:19.473830 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-scheduler-scheduler\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-nova-scheduler:current-podified\\\"\"" pod="openstack/nova-scheduler-0" podUID="f97c9e6f-a864-4903-b775-0cef0afed268" Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.480286 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ironic-inspector-0" Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.483316 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ironic-inspector-0" Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.484886 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe-config-data\") pod \"b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe\" (UID: \"b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe\") " Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.485058 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe-combined-ca-bundle\") pod \"b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe\" (UID: \"b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe\") " Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.485096 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7qb7k\" (UniqueName: \"kubernetes.io/projected/b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe-kube-api-access-7qb7k\") pod \"b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe\" (UID: \"b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe\") " Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.485187 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe-logs\") pod \"b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe\" (UID: \"b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe\") " Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.485539 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe-logs" (OuterVolumeSpecName: "logs") pod "b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe" (UID: "b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.486216 4838 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe-logs\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.495237 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe" (UID: "b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.497076 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe-config-data" (OuterVolumeSpecName: "config-data") pod "b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe" (UID: "b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.531134 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe-kube-api-access-7qb7k" (OuterVolumeSpecName: "kube-api-access-7qb7k") pod "b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe" (UID: "b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe"). InnerVolumeSpecName "kube-api-access-7qb7k". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.589952 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.589997 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.590011 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7qb7k\" (UniqueName: \"kubernetes.io/projected/b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe-kube-api-access-7qb7k\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.823069 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.831357 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.849147 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 02 11:19:19 crc kubenswrapper[4838]: E0202 11:19:19.849742 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d60f2880-0bad-4592-a199-c24539da55ab" containerName="dnsmasq-dns" Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.849785 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="d60f2880-0bad-4592-a199-c24539da55ab" containerName="dnsmasq-dns" Feb 02 11:19:19 crc kubenswrapper[4838]: E0202 11:19:19.849812 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d60f2880-0bad-4592-a199-c24539da55ab" containerName="init" Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.849819 
4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="d60f2880-0bad-4592-a199-c24539da55ab" containerName="init" Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.850096 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="d60f2880-0bad-4592-a199-c24539da55ab" containerName="dnsmasq-dns" Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.856182 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.860359 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.860828 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.887956 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.898898 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57bf0b31-2ff8-44ad-a509-be8868430dd2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"57bf0b31-2ff8-44ad-a509-be8868430dd2\") " pod="openstack/nova-metadata-0" Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.899289 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/57bf0b31-2ff8-44ad-a509-be8868430dd2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"57bf0b31-2ff8-44ad-a509-be8868430dd2\") " pod="openstack/nova-metadata-0" Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.899355 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbql6\" (UniqueName: \"kubernetes.io/projected/57bf0b31-2ff8-44ad-a509-be8868430dd2-kube-api-access-sbql6\") pod \"nova-metadata-0\" (UID: \"57bf0b31-2ff8-44ad-a509-be8868430dd2\") " pod="openstack/nova-metadata-0" Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.899418 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57bf0b31-2ff8-44ad-a509-be8868430dd2-logs\") pod \"nova-metadata-0\" (UID: \"57bf0b31-2ff8-44ad-a509-be8868430dd2\") " pod="openstack/nova-metadata-0" Feb 02 11:19:19 crc kubenswrapper[4838]: I0202 11:19:19.899445 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57bf0b31-2ff8-44ad-a509-be8868430dd2-config-data\") pod \"nova-metadata-0\" (UID: \"57bf0b31-2ff8-44ad-a509-be8868430dd2\") " pod="openstack/nova-metadata-0" Feb 02 11:19:20 crc kubenswrapper[4838]: I0202 11:19:20.001334 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/57bf0b31-2ff8-44ad-a509-be8868430dd2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"57bf0b31-2ff8-44ad-a509-be8868430dd2\") " pod="openstack/nova-metadata-0" Feb 02 11:19:20 crc kubenswrapper[4838]: I0202 11:19:20.001449 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbql6\" (UniqueName: \"kubernetes.io/projected/57bf0b31-2ff8-44ad-a509-be8868430dd2-kube-api-access-sbql6\") pod 
\"nova-metadata-0\" (UID: \"57bf0b31-2ff8-44ad-a509-be8868430dd2\") " pod="openstack/nova-metadata-0" Feb 02 11:19:20 crc kubenswrapper[4838]: I0202 11:19:20.001520 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57bf0b31-2ff8-44ad-a509-be8868430dd2-logs\") pod \"nova-metadata-0\" (UID: \"57bf0b31-2ff8-44ad-a509-be8868430dd2\") " pod="openstack/nova-metadata-0" Feb 02 11:19:20 crc kubenswrapper[4838]: I0202 11:19:20.001549 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57bf0b31-2ff8-44ad-a509-be8868430dd2-config-data\") pod \"nova-metadata-0\" (UID: \"57bf0b31-2ff8-44ad-a509-be8868430dd2\") " pod="openstack/nova-metadata-0" Feb 02 11:19:20 crc kubenswrapper[4838]: I0202 11:19:20.001643 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57bf0b31-2ff8-44ad-a509-be8868430dd2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"57bf0b31-2ff8-44ad-a509-be8868430dd2\") " pod="openstack/nova-metadata-0" Feb 02 11:19:20 crc kubenswrapper[4838]: I0202 11:19:20.002409 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57bf0b31-2ff8-44ad-a509-be8868430dd2-logs\") pod \"nova-metadata-0\" (UID: \"57bf0b31-2ff8-44ad-a509-be8868430dd2\") " pod="openstack/nova-metadata-0" Feb 02 11:19:20 crc kubenswrapper[4838]: I0202 11:19:20.007936 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57bf0b31-2ff8-44ad-a509-be8868430dd2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"57bf0b31-2ff8-44ad-a509-be8868430dd2\") " pod="openstack/nova-metadata-0" Feb 02 11:19:20 crc kubenswrapper[4838]: I0202 11:19:20.008480 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57bf0b31-2ff8-44ad-a509-be8868430dd2-config-data\") pod \"nova-metadata-0\" (UID: \"57bf0b31-2ff8-44ad-a509-be8868430dd2\") " pod="openstack/nova-metadata-0" Feb 02 11:19:20 crc kubenswrapper[4838]: I0202 11:19:20.008844 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/57bf0b31-2ff8-44ad-a509-be8868430dd2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"57bf0b31-2ff8-44ad-a509-be8868430dd2\") " pod="openstack/nova-metadata-0" Feb 02 11:19:20 crc kubenswrapper[4838]: I0202 11:19:20.022004 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbql6\" (UniqueName: \"kubernetes.io/projected/57bf0b31-2ff8-44ad-a509-be8868430dd2-kube-api-access-sbql6\") pod \"nova-metadata-0\" (UID: \"57bf0b31-2ff8-44ad-a509-be8868430dd2\") " pod="openstack/nova-metadata-0" Feb 02 11:19:20 crc kubenswrapper[4838]: I0202 11:19:20.175649 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 11:19:20 crc kubenswrapper[4838]: I0202 11:19:20.517915 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe" path="/var/lib/kubelet/pods/b8bb7f20-b07d-49a4-b9d0-7e1cc5f8f6fe/volumes" Feb 02 11:19:20 crc kubenswrapper[4838]: I0202 11:19:20.667082 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 11:19:20 crc kubenswrapper[4838]: W0202 11:19:20.679988 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod57bf0b31_2ff8_44ad_a509_be8868430dd2.slice/crio-c9abda5bfd3c38cb8ce3f3904188986a22cbb8dabd7ceb51b651ade538eed826 WatchSource:0}: Error finding container c9abda5bfd3c38cb8ce3f3904188986a22cbb8dabd7ceb51b651ade538eed826: Status 404 returned error can't find the container with id c9abda5bfd3c38cb8ce3f3904188986a22cbb8dabd7ceb51b651ade538eed826 Feb 02 11:19:21 crc kubenswrapper[4838]: I0202 11:19:21.491200 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"57bf0b31-2ff8-44ad-a509-be8868430dd2","Type":"ContainerStarted","Data":"c9abda5bfd3c38cb8ce3f3904188986a22cbb8dabd7ceb51b651ade538eed826"} Feb 02 11:19:21 crc kubenswrapper[4838]: I0202 11:19:21.596455 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-795f4db4bc-ls245" podUID="d60f2880-0bad-4592-a199-c24539da55ab" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.179:5353: i/o timeout" Feb 02 11:19:23 crc kubenswrapper[4838]: I0202 11:19:23.514832 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"97a5f67e-673d-47c3-826d-75f217906282","Type":"ContainerStarted","Data":"458c7590961d20f9c73148037bc4e0fdfac81792313d161742c1225e964a45db"} Feb 02 11:19:23 crc kubenswrapper[4838]: I0202 11:19:23.515333 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="97a5f67e-673d-47c3-826d-75f217906282" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://458c7590961d20f9c73148037bc4e0fdfac81792313d161742c1225e964a45db" gracePeriod=30 Feb 02 11:19:23 crc kubenswrapper[4838]: I0202 11:19:23.523421 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"57bf0b31-2ff8-44ad-a509-be8868430dd2","Type":"ContainerStarted","Data":"c51e726089d6adaff1969988117dbbc49db89913e29199c706a61d4438005328"} Feb 02 11:19:23 crc kubenswrapper[4838]: I0202 11:19:23.523495 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"57bf0b31-2ff8-44ad-a509-be8868430dd2","Type":"ContainerStarted","Data":"38f8ef9d6c64c283d622e3d34cbab7d94688ae6d7ba92a91f84e63dc4f5052aa"} Feb 02 11:19:23 crc kubenswrapper[4838]: I0202 11:19:23.549966 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.963551533 podStartE2EDuration="21.54992833s" podCreationTimestamp="2026-02-02 11:19:02 +0000 UTC" firstStartedPulling="2026-02-02 11:19:04.197924324 +0000 UTC m=+1538.535025352" lastFinishedPulling="2026-02-02 11:19:22.784301121 +0000 UTC m=+1557.121402149" observedRunningTime="2026-02-02 11:19:23.545319558 +0000 UTC m=+1557.882420596" watchObservedRunningTime="2026-02-02 11:19:23.54992833 +0000 UTC m=+1557.887029358" Feb 02 11:19:23 crc 
kubenswrapper[4838]: I0202 11:19:23.591954 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.488439812 podStartE2EDuration="4.591928299s" podCreationTimestamp="2026-02-02 11:19:19 +0000 UTC" firstStartedPulling="2026-02-02 11:19:20.682001135 +0000 UTC m=+1555.019102163" lastFinishedPulling="2026-02-02 11:19:22.785489622 +0000 UTC m=+1557.122590650" observedRunningTime="2026-02-02 11:19:23.573276632 +0000 UTC m=+1557.910377680" watchObservedRunningTime="2026-02-02 11:19:23.591928299 +0000 UTC m=+1557.929029327" Feb 02 11:19:25 crc kubenswrapper[4838]: I0202 11:19:25.176534 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 02 11:19:25 crc kubenswrapper[4838]: I0202 11:19:25.176979 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 02 11:19:26 crc kubenswrapper[4838]: I0202 11:19:26.550803 4838 generic.go:334] "Generic (PLEG): container finished" podID="9f9f491b-916e-4236-96b5-8da53babac04" containerID="43c015336232612c23c346095e9d080a828e7b82c35111545a72241601c2a52f" exitCode=0 Feb 02 11:19:26 crc kubenswrapper[4838]: I0202 11:19:26.550880 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-2d8xd" event={"ID":"9f9f491b-916e-4236-96b5-8da53babac04","Type":"ContainerDied","Data":"43c015336232612c23c346095e9d080a828e7b82c35111545a72241601c2a52f"} Feb 02 11:19:27 crc kubenswrapper[4838]: I0202 11:19:27.170154 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:19:27 crc kubenswrapper[4838]: I0202 11:19:27.170768 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" containerName="ceilometer-central-agent" containerID="cri-o://81d235f814a1853993fc8a93e5e2fcd2d6f606b6a9168da6972bf64ba8ac9164" gracePeriod=30 Feb 02 11:19:27 crc kubenswrapper[4838]: I0202 11:19:27.170826 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" containerName="proxy-httpd" containerID="cri-o://d7d5343b65a57e6bc9243a2d381186e1b1cc09829c85f0e0377e8038c6d5b217" gracePeriod=30 Feb 02 11:19:27 crc kubenswrapper[4838]: I0202 11:19:27.170892 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" containerName="ceilometer-notification-agent" containerID="cri-o://b78a230bd6b7841ac2d8cc3e207e9220466f64b3d78b9e04be7d93d07cfbc61a" gracePeriod=30 Feb 02 11:19:27 crc kubenswrapper[4838]: I0202 11:19:27.170958 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" containerName="sg-core" containerID="cri-o://18cc5dfadc68883ae64300002aa4f2a2b79b50fd4225429116d760319e35826d" gracePeriod=30 Feb 02 11:19:27 crc kubenswrapper[4838]: I0202 11:19:27.576869 4838 generic.go:334] "Generic (PLEG): container finished" podID="7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" containerID="d7d5343b65a57e6bc9243a2d381186e1b1cc09829c85f0e0377e8038c6d5b217" exitCode=0 Feb 02 11:19:27 crc kubenswrapper[4838]: I0202 11:19:27.577170 4838 generic.go:334] "Generic (PLEG): container finished" podID="7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" containerID="18cc5dfadc68883ae64300002aa4f2a2b79b50fd4225429116d760319e35826d" exitCode=2 Feb 
02 11:19:27 crc kubenswrapper[4838]: I0202 11:19:27.576920 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa","Type":"ContainerDied","Data":"d7d5343b65a57e6bc9243a2d381186e1b1cc09829c85f0e0377e8038c6d5b217"} Feb 02 11:19:27 crc kubenswrapper[4838]: I0202 11:19:27.577286 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa","Type":"ContainerDied","Data":"18cc5dfadc68883ae64300002aa4f2a2b79b50fd4225429116d760319e35826d"} Feb 02 11:19:27 crc kubenswrapper[4838]: I0202 11:19:27.867192 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.091045 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-2d8xd" Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.169159 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-cxl9q"] Feb 02 11:19:28 crc kubenswrapper[4838]: E0202 11:19:28.169679 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f9f491b-916e-4236-96b5-8da53babac04" containerName="nova-manage" Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.169705 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f9f491b-916e-4236-96b5-8da53babac04" containerName="nova-manage" Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.169939 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f9f491b-916e-4236-96b5-8da53babac04" containerName="nova-manage" Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.172050 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cxl9q" Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.193446 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cxl9q"] Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.210068 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2wvqq\" (UniqueName: \"kubernetes.io/projected/9f9f491b-916e-4236-96b5-8da53babac04-kube-api-access-2wvqq\") pod \"9f9f491b-916e-4236-96b5-8da53babac04\" (UID: \"9f9f491b-916e-4236-96b5-8da53babac04\") " Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.210140 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f9f491b-916e-4236-96b5-8da53babac04-scripts\") pod \"9f9f491b-916e-4236-96b5-8da53babac04\" (UID: \"9f9f491b-916e-4236-96b5-8da53babac04\") " Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.210317 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f9f491b-916e-4236-96b5-8da53babac04-combined-ca-bundle\") pod \"9f9f491b-916e-4236-96b5-8da53babac04\" (UID: \"9f9f491b-916e-4236-96b5-8da53babac04\") " Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.210362 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f9f491b-916e-4236-96b5-8da53babac04-config-data\") pod \"9f9f491b-916e-4236-96b5-8da53babac04\" (UID: \"9f9f491b-916e-4236-96b5-8da53babac04\") " Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.216988 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f9f491b-916e-4236-96b5-8da53babac04-scripts" (OuterVolumeSpecName: "scripts") pod "9f9f491b-916e-4236-96b5-8da53babac04" (UID: "9f9f491b-916e-4236-96b5-8da53babac04"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.219361 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f9f491b-916e-4236-96b5-8da53babac04-kube-api-access-2wvqq" (OuterVolumeSpecName: "kube-api-access-2wvqq") pod "9f9f491b-916e-4236-96b5-8da53babac04" (UID: "9f9f491b-916e-4236-96b5-8da53babac04"). InnerVolumeSpecName "kube-api-access-2wvqq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.248639 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f9f491b-916e-4236-96b5-8da53babac04-config-data" (OuterVolumeSpecName: "config-data") pod "9f9f491b-916e-4236-96b5-8da53babac04" (UID: "9f9f491b-916e-4236-96b5-8da53babac04"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.271701 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f9f491b-916e-4236-96b5-8da53babac04-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9f9f491b-916e-4236-96b5-8da53babac04" (UID: "9f9f491b-916e-4236-96b5-8da53babac04"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.312662 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81309430-b454-4c48-95da-f3dbed0ad937-utilities\") pod \"certified-operators-cxl9q\" (UID: \"81309430-b454-4c48-95da-f3dbed0ad937\") " pod="openshift-marketplace/certified-operators-cxl9q" Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.312802 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81309430-b454-4c48-95da-f3dbed0ad937-catalog-content\") pod \"certified-operators-cxl9q\" (UID: \"81309430-b454-4c48-95da-f3dbed0ad937\") " pod="openshift-marketplace/certified-operators-cxl9q" Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.312829 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjjtj\" (UniqueName: \"kubernetes.io/projected/81309430-b454-4c48-95da-f3dbed0ad937-kube-api-access-rjjtj\") pod \"certified-operators-cxl9q\" (UID: \"81309430-b454-4c48-95da-f3dbed0ad937\") " pod="openshift-marketplace/certified-operators-cxl9q" Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.312908 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f9f491b-916e-4236-96b5-8da53babac04-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.312923 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2wvqq\" (UniqueName: \"kubernetes.io/projected/9f9f491b-916e-4236-96b5-8da53babac04-kube-api-access-2wvqq\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.312935 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f9f491b-916e-4236-96b5-8da53babac04-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.312948 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f9f491b-916e-4236-96b5-8da53babac04-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.414218 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81309430-b454-4c48-95da-f3dbed0ad937-catalog-content\") pod \"certified-operators-cxl9q\" (UID: \"81309430-b454-4c48-95da-f3dbed0ad937\") " pod="openshift-marketplace/certified-operators-cxl9q" Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.414273 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjjtj\" (UniqueName: \"kubernetes.io/projected/81309430-b454-4c48-95da-f3dbed0ad937-kube-api-access-rjjtj\") pod \"certified-operators-cxl9q\" (UID: \"81309430-b454-4c48-95da-f3dbed0ad937\") " pod="openshift-marketplace/certified-operators-cxl9q" Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.414393 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81309430-b454-4c48-95da-f3dbed0ad937-utilities\") pod \"certified-operators-cxl9q\" (UID: \"81309430-b454-4c48-95da-f3dbed0ad937\") " 
pod="openshift-marketplace/certified-operators-cxl9q" Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.415043 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81309430-b454-4c48-95da-f3dbed0ad937-utilities\") pod \"certified-operators-cxl9q\" (UID: \"81309430-b454-4c48-95da-f3dbed0ad937\") " pod="openshift-marketplace/certified-operators-cxl9q" Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.415244 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81309430-b454-4c48-95da-f3dbed0ad937-catalog-content\") pod \"certified-operators-cxl9q\" (UID: \"81309430-b454-4c48-95da-f3dbed0ad937\") " pod="openshift-marketplace/certified-operators-cxl9q" Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.435766 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjjtj\" (UniqueName: \"kubernetes.io/projected/81309430-b454-4c48-95da-f3dbed0ad937-kube-api-access-rjjtj\") pod \"certified-operators-cxl9q\" (UID: \"81309430-b454-4c48-95da-f3dbed0ad937\") " pod="openshift-marketplace/certified-operators-cxl9q" Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.498120 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cxl9q" Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.594429 4838 generic.go:334] "Generic (PLEG): container finished" podID="dc01012d-a1bc-4849-8cc3-c0b3fc3f5504" containerID="1d5cbeee6e023cbfb8e09f59ee31635ff2d97ec33d9dd5982af2a2591a8392af" exitCode=0 Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.594521 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-m9rhm" event={"ID":"dc01012d-a1bc-4849-8cc3-c0b3fc3f5504","Type":"ContainerDied","Data":"1d5cbeee6e023cbfb8e09f59ee31635ff2d97ec33d9dd5982af2a2591a8392af"} Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.603567 4838 generic.go:334] "Generic (PLEG): container finished" podID="7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" containerID="81d235f814a1853993fc8a93e5e2fcd2d6f606b6a9168da6972bf64ba8ac9164" exitCode=0 Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.603681 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa","Type":"ContainerDied","Data":"81d235f814a1853993fc8a93e5e2fcd2d6f606b6a9168da6972bf64ba8ac9164"} Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.611558 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-2d8xd" event={"ID":"9f9f491b-916e-4236-96b5-8da53babac04","Type":"ContainerDied","Data":"1c67450f6a87b6b56d33c9406dd6782b603d142421c0269b7dcd1f199fb6d160"} Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.612084 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1c67450f6a87b6b56d33c9406dd6782b603d142421c0269b7dcd1f199fb6d160" Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.612169 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-2d8xd" Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.811478 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.831694 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.840330 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.840567 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="57bf0b31-2ff8-44ad-a509-be8868430dd2" containerName="nova-metadata-log" containerID="cri-o://38f8ef9d6c64c283d622e3d34cbab7d94688ae6d7ba92a91f84e63dc4f5052aa" gracePeriod=30 Feb 02 11:19:28 crc kubenswrapper[4838]: I0202 11:19:28.840731 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="57bf0b31-2ff8-44ad-a509-be8868430dd2" containerName="nova-metadata-metadata" containerID="cri-o://c51e726089d6adaff1969988117dbbc49db89913e29199c706a61d4438005328" gracePeriod=30 Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.085415 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cxl9q"] Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.607385 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.688467 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qwr5t\" (UniqueName: \"kubernetes.io/projected/f97c9e6f-a864-4903-b775-0cef0afed268-kube-api-access-qwr5t\") pod \"f97c9e6f-a864-4903-b775-0cef0afed268\" (UID: \"f97c9e6f-a864-4903-b775-0cef0afed268\") " Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.688550 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f97c9e6f-a864-4903-b775-0cef0afed268-combined-ca-bundle\") pod \"f97c9e6f-a864-4903-b775-0cef0afed268\" (UID: \"f97c9e6f-a864-4903-b775-0cef0afed268\") " Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.688608 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f97c9e6f-a864-4903-b775-0cef0afed268-config-data\") pod \"f97c9e6f-a864-4903-b775-0cef0afed268\" (UID: \"f97c9e6f-a864-4903-b775-0cef0afed268\") " Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.691005 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.695835 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f97c9e6f-a864-4903-b775-0cef0afed268-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f97c9e6f-a864-4903-b775-0cef0afed268" (UID: "f97c9e6f-a864-4903-b775-0cef0afed268"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.701716 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cxl9q" event={"ID":"81309430-b454-4c48-95da-f3dbed0ad937","Type":"ContainerStarted","Data":"02d6c25e0687a436d3f24eebf3c27c8a62c3e9f0297686c2354ec69403eee35e"} Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.712874 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f97c9e6f-a864-4903-b775-0cef0afed268-config-data" (OuterVolumeSpecName: "config-data") pod "f97c9e6f-a864-4903-b775-0cef0afed268" (UID: "f97c9e6f-a864-4903-b775-0cef0afed268"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.713692 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f97c9e6f-a864-4903-b775-0cef0afed268-kube-api-access-qwr5t" (OuterVolumeSpecName: "kube-api-access-qwr5t") pod "f97c9e6f-a864-4903-b775-0cef0afed268" (UID: "f97c9e6f-a864-4903-b775-0cef0afed268"). InnerVolumeSpecName "kube-api-access-qwr5t". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.733806 4838 generic.go:334] "Generic (PLEG): container finished" podID="7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" containerID="b78a230bd6b7841ac2d8cc3e207e9220466f64b3d78b9e04be7d93d07cfbc61a" exitCode=0 Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.734098 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa","Type":"ContainerDied","Data":"b78a230bd6b7841ac2d8cc3e207e9220466f64b3d78b9e04be7d93d07cfbc61a"} Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.744066 4838 generic.go:334] "Generic (PLEG): container finished" podID="57bf0b31-2ff8-44ad-a509-be8868430dd2" containerID="c51e726089d6adaff1969988117dbbc49db89913e29199c706a61d4438005328" exitCode=0 Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.744276 4838 generic.go:334] "Generic (PLEG): container finished" podID="57bf0b31-2ff8-44ad-a509-be8868430dd2" containerID="38f8ef9d6c64c283d622e3d34cbab7d94688ae6d7ba92a91f84e63dc4f5052aa" exitCode=143 Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.744383 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"57bf0b31-2ff8-44ad-a509-be8868430dd2","Type":"ContainerDied","Data":"c51e726089d6adaff1969988117dbbc49db89913e29199c706a61d4438005328"} Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.744465 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"57bf0b31-2ff8-44ad-a509-be8868430dd2","Type":"ContainerDied","Data":"38f8ef9d6c64c283d622e3d34cbab7d94688ae6d7ba92a91f84e63dc4f5052aa"} Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.746256 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"06d2d265-0f5f-4b18-b02f-8c83b6c8d547","Type":"ContainerDied","Data":"ead273bbad5a08da7b91d8fa3e91c482b2e6b3afa36ceda81af88d09f8948be5"} Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.746421 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.751752 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f97c9e6f-a864-4903-b775-0cef0afed268","Type":"ContainerDied","Data":"9ea1b9d4ee7edbd1db5e014b145cc700a409a72d3e718899e8b31f0676d868d7"} Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.751820 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.790980 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/06d2d265-0f5f-4b18-b02f-8c83b6c8d547-logs\") pod \"06d2d265-0f5f-4b18-b02f-8c83b6c8d547\" (UID: \"06d2d265-0f5f-4b18-b02f-8c83b6c8d547\") " Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.791033 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nhwkz\" (UniqueName: \"kubernetes.io/projected/06d2d265-0f5f-4b18-b02f-8c83b6c8d547-kube-api-access-nhwkz\") pod \"06d2d265-0f5f-4b18-b02f-8c83b6c8d547\" (UID: \"06d2d265-0f5f-4b18-b02f-8c83b6c8d547\") " Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.791078 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06d2d265-0f5f-4b18-b02f-8c83b6c8d547-config-data\") pod \"06d2d265-0f5f-4b18-b02f-8c83b6c8d547\" (UID: \"06d2d265-0f5f-4b18-b02f-8c83b6c8d547\") " Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.791250 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06d2d265-0f5f-4b18-b02f-8c83b6c8d547-combined-ca-bundle\") pod \"06d2d265-0f5f-4b18-b02f-8c83b6c8d547\" (UID: \"06d2d265-0f5f-4b18-b02f-8c83b6c8d547\") " Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.791773 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qwr5t\" (UniqueName: \"kubernetes.io/projected/f97c9e6f-a864-4903-b775-0cef0afed268-kube-api-access-qwr5t\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.791790 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f97c9e6f-a864-4903-b775-0cef0afed268-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.791803 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f97c9e6f-a864-4903-b775-0cef0afed268-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.791921 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06d2d265-0f5f-4b18-b02f-8c83b6c8d547-logs" (OuterVolumeSpecName: "logs") pod "06d2d265-0f5f-4b18-b02f-8c83b6c8d547" (UID: "06d2d265-0f5f-4b18-b02f-8c83b6c8d547"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.801210 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06d2d265-0f5f-4b18-b02f-8c83b6c8d547-config-data" (OuterVolumeSpecName: "config-data") pod "06d2d265-0f5f-4b18-b02f-8c83b6c8d547" (UID: "06d2d265-0f5f-4b18-b02f-8c83b6c8d547"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.801972 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06d2d265-0f5f-4b18-b02f-8c83b6c8d547-kube-api-access-nhwkz" (OuterVolumeSpecName: "kube-api-access-nhwkz") pod "06d2d265-0f5f-4b18-b02f-8c83b6c8d547" (UID: "06d2d265-0f5f-4b18-b02f-8c83b6c8d547"). InnerVolumeSpecName "kube-api-access-nhwkz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.802111 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06d2d265-0f5f-4b18-b02f-8c83b6c8d547-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "06d2d265-0f5f-4b18-b02f-8c83b6c8d547" (UID: "06d2d265-0f5f-4b18-b02f-8c83b6c8d547"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.866385 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.893216 4838 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/06d2d265-0f5f-4b18-b02f-8c83b6c8d547-logs\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.893255 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nhwkz\" (UniqueName: \"kubernetes.io/projected/06d2d265-0f5f-4b18-b02f-8c83b6c8d547-kube-api-access-nhwkz\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.893268 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06d2d265-0f5f-4b18-b02f-8c83b6c8d547-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.893276 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06d2d265-0f5f-4b18-b02f-8c83b6c8d547-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.896826 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.939006 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.956342 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 11:19:29 crc kubenswrapper[4838]: E0202 11:19:29.956936 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57bf0b31-2ff8-44ad-a509-be8868430dd2" containerName="nova-metadata-log" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.956962 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="57bf0b31-2ff8-44ad-a509-be8868430dd2" containerName="nova-metadata-log" Feb 02 11:19:29 crc kubenswrapper[4838]: E0202 11:19:29.956998 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57bf0b31-2ff8-44ad-a509-be8868430dd2" containerName="nova-metadata-metadata" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.957021 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="57bf0b31-2ff8-44ad-a509-be8868430dd2" containerName="nova-metadata-metadata" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.957237 4838 
memory_manager.go:354] "RemoveStaleState removing state" podUID="57bf0b31-2ff8-44ad-a509-be8868430dd2" containerName="nova-metadata-metadata" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.957266 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="57bf0b31-2ff8-44ad-a509-be8868430dd2" containerName="nova-metadata-log" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.958021 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.960653 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.975544 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.993883 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57bf0b31-2ff8-44ad-a509-be8868430dd2-logs\") pod \"57bf0b31-2ff8-44ad-a509-be8868430dd2\" (UID: \"57bf0b31-2ff8-44ad-a509-be8868430dd2\") " Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.994200 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/57bf0b31-2ff8-44ad-a509-be8868430dd2-nova-metadata-tls-certs\") pod \"57bf0b31-2ff8-44ad-a509-be8868430dd2\" (UID: \"57bf0b31-2ff8-44ad-a509-be8868430dd2\") " Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.994358 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57bf0b31-2ff8-44ad-a509-be8868430dd2-combined-ca-bundle\") pod \"57bf0b31-2ff8-44ad-a509-be8868430dd2\" (UID: \"57bf0b31-2ff8-44ad-a509-be8868430dd2\") " Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.994523 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57bf0b31-2ff8-44ad-a509-be8868430dd2-config-data\") pod \"57bf0b31-2ff8-44ad-a509-be8868430dd2\" (UID: \"57bf0b31-2ff8-44ad-a509-be8868430dd2\") " Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.994667 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sbql6\" (UniqueName: \"kubernetes.io/projected/57bf0b31-2ff8-44ad-a509-be8868430dd2-kube-api-access-sbql6\") pod \"57bf0b31-2ff8-44ad-a509-be8868430dd2\" (UID: \"57bf0b31-2ff8-44ad-a509-be8868430dd2\") " Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.994520 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57bf0b31-2ff8-44ad-a509-be8868430dd2-logs" (OuterVolumeSpecName: "logs") pod "57bf0b31-2ff8-44ad-a509-be8868430dd2" (UID: "57bf0b31-2ff8-44ad-a509-be8868430dd2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:19:29 crc kubenswrapper[4838]: I0202 11:19:29.999095 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57bf0b31-2ff8-44ad-a509-be8868430dd2-kube-api-access-sbql6" (OuterVolumeSpecName: "kube-api-access-sbql6") pod "57bf0b31-2ff8-44ad-a509-be8868430dd2" (UID: "57bf0b31-2ff8-44ad-a509-be8868430dd2"). InnerVolumeSpecName "kube-api-access-sbql6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.022977 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.028110 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57bf0b31-2ff8-44ad-a509-be8868430dd2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "57bf0b31-2ff8-44ad-a509-be8868430dd2" (UID: "57bf0b31-2ff8-44ad-a509-be8868430dd2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.030800 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57bf0b31-2ff8-44ad-a509-be8868430dd2-config-data" (OuterVolumeSpecName: "config-data") pod "57bf0b31-2ff8-44ad-a509-be8868430dd2" (UID: "57bf0b31-2ff8-44ad-a509-be8868430dd2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.070324 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57bf0b31-2ff8-44ad-a509-be8868430dd2-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "57bf0b31-2ff8-44ad-a509-be8868430dd2" (UID: "57bf0b31-2ff8-44ad-a509-be8868430dd2"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.096871 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-scripts\") pod \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.096970 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-run-httpd\") pod \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.097034 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-sg-core-conf-yaml\") pod \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.097113 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-combined-ca-bundle\") pod \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.097143 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-ceilometer-tls-certs\") pod \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.097273 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-log-httpd\") pod \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.097339 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2x65g\" (UniqueName: \"kubernetes.io/projected/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-kube-api-access-2x65g\") pod \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.097371 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-config-data\") pod \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\" (UID: \"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa\") " Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.098276 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" (UID: "7bc80d3e-05b1-4bfa-9c94-3d7c162420aa"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.098702 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzsbb\" (UniqueName: \"kubernetes.io/projected/a451fb50-63dc-4dcc-8cf0-37e7f3d99888-kube-api-access-lzsbb\") pod \"nova-scheduler-0\" (UID: \"a451fb50-63dc-4dcc-8cf0-37e7f3d99888\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.098893 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a451fb50-63dc-4dcc-8cf0-37e7f3d99888-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a451fb50-63dc-4dcc-8cf0-37e7f3d99888\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.098971 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a451fb50-63dc-4dcc-8cf0-37e7f3d99888-config-data\") pod \"nova-scheduler-0\" (UID: \"a451fb50-63dc-4dcc-8cf0-37e7f3d99888\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.099233 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57bf0b31-2ff8-44ad-a509-be8868430dd2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.099302 4838 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.099395 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57bf0b31-2ff8-44ad-a509-be8868430dd2-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.099452 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sbql6\" (UniqueName: \"kubernetes.io/projected/57bf0b31-2ff8-44ad-a509-be8868430dd2-kube-api-access-sbql6\") on node \"crc\" DevicePath \"\"" Feb 02 
11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.099511 4838 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57bf0b31-2ff8-44ad-a509-be8868430dd2-logs\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.099572 4838 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/57bf0b31-2ff8-44ad-a509-be8868430dd2-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.099435 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" (UID: "7bc80d3e-05b1-4bfa-9c94-3d7c162420aa"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.104259 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-scripts" (OuterVolumeSpecName: "scripts") pod "7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" (UID: "7bc80d3e-05b1-4bfa-9c94-3d7c162420aa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.126916 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-kube-api-access-2x65g" (OuterVolumeSpecName: "kube-api-access-2x65g") pod "7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" (UID: "7bc80d3e-05b1-4bfa-9c94-3d7c162420aa"). InnerVolumeSpecName "kube-api-access-2x65g". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.137977 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.156532 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.158757 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" (UID: "7bc80d3e-05b1-4bfa-9c94-3d7c162420aa"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.169266 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 02 11:19:30 crc kubenswrapper[4838]: E0202 11:19:30.171302 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" containerName="ceilometer-notification-agent" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.171320 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" containerName="ceilometer-notification-agent" Feb 02 11:19:30 crc kubenswrapper[4838]: E0202 11:19:30.171339 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" containerName="ceilometer-central-agent" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.171348 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" containerName="ceilometer-central-agent" Feb 02 11:19:30 crc kubenswrapper[4838]: E0202 11:19:30.171368 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" containerName="proxy-httpd" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.171375 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" containerName="proxy-httpd" Feb 02 11:19:30 crc kubenswrapper[4838]: E0202 11:19:30.171388 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" containerName="sg-core" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.171394 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" containerName="sg-core" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.171593 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" containerName="ceilometer-central-agent" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.171608 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" containerName="ceilometer-notification-agent" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.171641 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" containerName="sg-core" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.171653 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" containerName="proxy-httpd" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.172814 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.178081 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.199553 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.200849 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzsbb\" (UniqueName: \"kubernetes.io/projected/a451fb50-63dc-4dcc-8cf0-37e7f3d99888-kube-api-access-lzsbb\") pod \"nova-scheduler-0\" (UID: \"a451fb50-63dc-4dcc-8cf0-37e7f3d99888\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.200941 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a451fb50-63dc-4dcc-8cf0-37e7f3d99888-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a451fb50-63dc-4dcc-8cf0-37e7f3d99888\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.200972 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a451fb50-63dc-4dcc-8cf0-37e7f3d99888-config-data\") pod \"nova-scheduler-0\" (UID: \"a451fb50-63dc-4dcc-8cf0-37e7f3d99888\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.201093 4838 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.201108 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2x65g\" (UniqueName: \"kubernetes.io/projected/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-kube-api-access-2x65g\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.201125 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.201136 4838 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.206302 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a451fb50-63dc-4dcc-8cf0-37e7f3d99888-config-data\") pod \"nova-scheduler-0\" (UID: \"a451fb50-63dc-4dcc-8cf0-37e7f3d99888\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.207751 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" (UID: "7bc80d3e-05b1-4bfa-9c94-3d7c162420aa"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.210141 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a451fb50-63dc-4dcc-8cf0-37e7f3d99888-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a451fb50-63dc-4dcc-8cf0-37e7f3d99888\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.226210 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzsbb\" (UniqueName: \"kubernetes.io/projected/a451fb50-63dc-4dcc-8cf0-37e7f3d99888-kube-api-access-lzsbb\") pod \"nova-scheduler-0\" (UID: \"a451fb50-63dc-4dcc-8cf0-37e7f3d99888\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.279891 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-m9rhm" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.283092 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.290670 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" (UID: "7bc80d3e-05b1-4bfa-9c94-3d7c162420aa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.310925 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98e7b3ca-1138-4df5-a2c4-f1b03b4352bb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"98e7b3ca-1138-4df5-a2c4-f1b03b4352bb\") " pod="openstack/nova-api-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.311040 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltf57\" (UniqueName: \"kubernetes.io/projected/98e7b3ca-1138-4df5-a2c4-f1b03b4352bb-kube-api-access-ltf57\") pod \"nova-api-0\" (UID: \"98e7b3ca-1138-4df5-a2c4-f1b03b4352bb\") " pod="openstack/nova-api-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.312782 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98e7b3ca-1138-4df5-a2c4-f1b03b4352bb-logs\") pod \"nova-api-0\" (UID: \"98e7b3ca-1138-4df5-a2c4-f1b03b4352bb\") " pod="openstack/nova-api-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.312895 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98e7b3ca-1138-4df5-a2c4-f1b03b4352bb-config-data\") pod \"nova-api-0\" (UID: \"98e7b3ca-1138-4df5-a2c4-f1b03b4352bb\") " pod="openstack/nova-api-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.313139 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.313154 4838 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.318550 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-config-data" (OuterVolumeSpecName: "config-data") pod "7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" (UID: "7bc80d3e-05b1-4bfa-9c94-3d7c162420aa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.415482 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc01012d-a1bc-4849-8cc3-c0b3fc3f5504-combined-ca-bundle\") pod \"dc01012d-a1bc-4849-8cc3-c0b3fc3f5504\" (UID: \"dc01012d-a1bc-4849-8cc3-c0b3fc3f5504\") " Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.415557 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dc01012d-a1bc-4849-8cc3-c0b3fc3f5504-config\") pod \"dc01012d-a1bc-4849-8cc3-c0b3fc3f5504\" (UID: \"dc01012d-a1bc-4849-8cc3-c0b3fc3f5504\") " Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.415596 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rc4tl\" (UniqueName: \"kubernetes.io/projected/dc01012d-a1bc-4849-8cc3-c0b3fc3f5504-kube-api-access-rc4tl\") pod \"dc01012d-a1bc-4849-8cc3-c0b3fc3f5504\" (UID: \"dc01012d-a1bc-4849-8cc3-c0b3fc3f5504\") " Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.415922 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98e7b3ca-1138-4df5-a2c4-f1b03b4352bb-logs\") pod \"nova-api-0\" (UID: \"98e7b3ca-1138-4df5-a2c4-f1b03b4352bb\") " pod="openstack/nova-api-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.415979 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98e7b3ca-1138-4df5-a2c4-f1b03b4352bb-config-data\") pod \"nova-api-0\" (UID: \"98e7b3ca-1138-4df5-a2c4-f1b03b4352bb\") " pod="openstack/nova-api-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.416085 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98e7b3ca-1138-4df5-a2c4-f1b03b4352bb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"98e7b3ca-1138-4df5-a2c4-f1b03b4352bb\") " pod="openstack/nova-api-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.416121 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltf57\" (UniqueName: \"kubernetes.io/projected/98e7b3ca-1138-4df5-a2c4-f1b03b4352bb-kube-api-access-ltf57\") pod \"nova-api-0\" (UID: \"98e7b3ca-1138-4df5-a2c4-f1b03b4352bb\") " pod="openstack/nova-api-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.416219 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.417134 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98e7b3ca-1138-4df5-a2c4-f1b03b4352bb-logs\") pod \"nova-api-0\" (UID: 
\"98e7b3ca-1138-4df5-a2c4-f1b03b4352bb\") " pod="openstack/nova-api-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.423527 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc01012d-a1bc-4849-8cc3-c0b3fc3f5504-kube-api-access-rc4tl" (OuterVolumeSpecName: "kube-api-access-rc4tl") pod "dc01012d-a1bc-4849-8cc3-c0b3fc3f5504" (UID: "dc01012d-a1bc-4849-8cc3-c0b3fc3f5504"). InnerVolumeSpecName "kube-api-access-rc4tl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.424426 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98e7b3ca-1138-4df5-a2c4-f1b03b4352bb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"98e7b3ca-1138-4df5-a2c4-f1b03b4352bb\") " pod="openstack/nova-api-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.425046 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98e7b3ca-1138-4df5-a2c4-f1b03b4352bb-config-data\") pod \"nova-api-0\" (UID: \"98e7b3ca-1138-4df5-a2c4-f1b03b4352bb\") " pod="openstack/nova-api-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.443264 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltf57\" (UniqueName: \"kubernetes.io/projected/98e7b3ca-1138-4df5-a2c4-f1b03b4352bb-kube-api-access-ltf57\") pod \"nova-api-0\" (UID: \"98e7b3ca-1138-4df5-a2c4-f1b03b4352bb\") " pod="openstack/nova-api-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.465190 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc01012d-a1bc-4849-8cc3-c0b3fc3f5504-config" (OuterVolumeSpecName: "config") pod "dc01012d-a1bc-4849-8cc3-c0b3fc3f5504" (UID: "dc01012d-a1bc-4849-8cc3-c0b3fc3f5504"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.466316 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc01012d-a1bc-4849-8cc3-c0b3fc3f5504-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dc01012d-a1bc-4849-8cc3-c0b3fc3f5504" (UID: "dc01012d-a1bc-4849-8cc3-c0b3fc3f5504"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.508608 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.517940 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc01012d-a1bc-4849-8cc3-c0b3fc3f5504-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.517975 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/dc01012d-a1bc-4849-8cc3-c0b3fc3f5504-config\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.517985 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rc4tl\" (UniqueName: \"kubernetes.io/projected/dc01012d-a1bc-4849-8cc3-c0b3fc3f5504-kube-api-access-rc4tl\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.543424 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06d2d265-0f5f-4b18-b02f-8c83b6c8d547" path="/var/lib/kubelet/pods/06d2d265-0f5f-4b18-b02f-8c83b6c8d547/volumes" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.552065 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f97c9e6f-a864-4903-b775-0cef0afed268" path="/var/lib/kubelet/pods/f97c9e6f-a864-4903-b775-0cef0afed268/volumes" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.765507 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-m9rhm" event={"ID":"dc01012d-a1bc-4849-8cc3-c0b3fc3f5504","Type":"ContainerDied","Data":"5663f3695d4e9a71d5a80c48ddfdc6aee679f359a6ec4c4747b39864198af827"} Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.765555 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5663f3695d4e9a71d5a80c48ddfdc6aee679f359a6ec4c4747b39864198af827" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.765645 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-m9rhm" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.783135 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bc80d3e-05b1-4bfa-9c94-3d7c162420aa","Type":"ContainerDied","Data":"4e82ad6f3d4ffc36061a30dfbb2eb66d097176ff8e1b190b3af10bccb7407012"} Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.783485 4838 scope.go:117] "RemoveContainer" containerID="d7d5343b65a57e6bc9243a2d381186e1b1cc09829c85f0e0377e8038c6d5b217" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.783167 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.817113 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"57bf0b31-2ff8-44ad-a509-be8868430dd2","Type":"ContainerDied","Data":"c9abda5bfd3c38cb8ce3f3904188986a22cbb8dabd7ceb51b651ade538eed826"} Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.817239 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.834424 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.836307 4838 generic.go:334] "Generic (PLEG): container finished" podID="81309430-b454-4c48-95da-f3dbed0ad937" containerID="88feb5fd9b891083958fe7aaef1505b23186a81da36a3676fbe0c4b92c34f961" exitCode=0 Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.836341 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cxl9q" event={"ID":"81309430-b454-4c48-95da-f3dbed0ad937","Type":"ContainerDied","Data":"88feb5fd9b891083958fe7aaef1505b23186a81da36a3676fbe0c4b92c34f961"} Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.843401 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.857806 4838 scope.go:117] "RemoveContainer" containerID="18cc5dfadc68883ae64300002aa4f2a2b79b50fd4225429116d760319e35826d" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.857937 4838 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.866435 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-5ww45"] Feb 02 11:19:30 crc kubenswrapper[4838]: E0202 11:19:30.866878 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc01012d-a1bc-4849-8cc3-c0b3fc3f5504" containerName="neutron-db-sync" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.866895 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc01012d-a1bc-4849-8cc3-c0b3fc3f5504" containerName="neutron-db-sync" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.867132 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc01012d-a1bc-4849-8cc3-c0b3fc3f5504" containerName="neutron-db-sync" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.868148 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-5ww45" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.908845 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.934247 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.944571 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.944837 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.945022 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.952250 4838 scope.go:117] "RemoveContainer" containerID="b78a230bd6b7841ac2d8cc3e207e9220466f64b3d78b9e04be7d93d07cfbc61a" Feb 02 11:19:30 crc kubenswrapper[4838]: I0202 11:19:30.968085 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.021609 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.057847 4838 scope.go:117] "RemoveContainer" containerID="81d235f814a1853993fc8a93e5e2fcd2d6f606b6a9168da6972bf64ba8ac9164" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.058757 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-config-data\") pod \"ceilometer-0\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " pod="openstack/ceilometer-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.058787 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7e894c9-38fb-4616-ab4b-68166a67b5b5-run-httpd\") pod \"ceilometer-0\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " pod="openstack/ceilometer-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.058828 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-dns-svc\") pod \"dnsmasq-dns-757b4f8459-5ww45\" (UID: \"3c81155e-9881-4ed2-bcec-7035aed80588\") " pod="openstack/dnsmasq-dns-757b4f8459-5ww45" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.058872 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7e894c9-38fb-4616-ab4b-68166a67b5b5-log-httpd\") pod \"ceilometer-0\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " pod="openstack/ceilometer-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.058888 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " pod="openstack/ceilometer-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.058906 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " pod="openstack/ceilometer-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.058952 4838 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzs5j\" (UniqueName: \"kubernetes.io/projected/3c81155e-9881-4ed2-bcec-7035aed80588-kube-api-access-gzs5j\") pod \"dnsmasq-dns-757b4f8459-5ww45\" (UID: \"3c81155e-9881-4ed2-bcec-7035aed80588\") " pod="openstack/dnsmasq-dns-757b4f8459-5ww45" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.058971 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wk6v7\" (UniqueName: \"kubernetes.io/projected/d7e894c9-38fb-4616-ab4b-68166a67b5b5-kube-api-access-wk6v7\") pod \"ceilometer-0\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " pod="openstack/ceilometer-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.058986 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-5ww45\" (UID: \"3c81155e-9881-4ed2-bcec-7035aed80588\") " pod="openstack/dnsmasq-dns-757b4f8459-5ww45" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.059015 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " pod="openstack/ceilometer-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.059057 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-config\") pod \"dnsmasq-dns-757b4f8459-5ww45\" (UID: \"3c81155e-9881-4ed2-bcec-7035aed80588\") " pod="openstack/dnsmasq-dns-757b4f8459-5ww45" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.059076 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-5ww45\" (UID: \"3c81155e-9881-4ed2-bcec-7035aed80588\") " pod="openstack/dnsmasq-dns-757b4f8459-5ww45" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.059132 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-5ww45\" (UID: \"3c81155e-9881-4ed2-bcec-7035aed80588\") " pod="openstack/dnsmasq-dns-757b4f8459-5ww45" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.059151 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-scripts\") pod \"ceilometer-0\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " pod="openstack/ceilometer-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.078725 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.111672 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-5ww45"] Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.140257 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/ceilometer-0"] Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.150764 4838 scope.go:117] "RemoveContainer" containerID="c51e726089d6adaff1969988117dbbc49db89913e29199c706a61d4438005328" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.161664 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " pod="openstack/ceilometer-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.161716 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7e894c9-38fb-4616-ab4b-68166a67b5b5-log-httpd\") pod \"ceilometer-0\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " pod="openstack/ceilometer-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.161749 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " pod="openstack/ceilometer-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.161820 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzs5j\" (UniqueName: \"kubernetes.io/projected/3c81155e-9881-4ed2-bcec-7035aed80588-kube-api-access-gzs5j\") pod \"dnsmasq-dns-757b4f8459-5ww45\" (UID: \"3c81155e-9881-4ed2-bcec-7035aed80588\") " pod="openstack/dnsmasq-dns-757b4f8459-5ww45" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.161844 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wk6v7\" (UniqueName: \"kubernetes.io/projected/d7e894c9-38fb-4616-ab4b-68166a67b5b5-kube-api-access-wk6v7\") pod \"ceilometer-0\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " pod="openstack/ceilometer-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.161864 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-5ww45\" (UID: \"3c81155e-9881-4ed2-bcec-7035aed80588\") " pod="openstack/dnsmasq-dns-757b4f8459-5ww45" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.161904 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " pod="openstack/ceilometer-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.161964 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-config\") pod \"dnsmasq-dns-757b4f8459-5ww45\" (UID: \"3c81155e-9881-4ed2-bcec-7035aed80588\") " pod="openstack/dnsmasq-dns-757b4f8459-5ww45" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.161990 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-5ww45\" (UID: \"3c81155e-9881-4ed2-bcec-7035aed80588\") " 
pod="openstack/dnsmasq-dns-757b4f8459-5ww45" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.162073 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-5ww45\" (UID: \"3c81155e-9881-4ed2-bcec-7035aed80588\") " pod="openstack/dnsmasq-dns-757b4f8459-5ww45" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.162103 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-scripts\") pod \"ceilometer-0\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " pod="openstack/ceilometer-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.162205 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-config-data\") pod \"ceilometer-0\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " pod="openstack/ceilometer-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.162238 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7e894c9-38fb-4616-ab4b-68166a67b5b5-run-httpd\") pod \"ceilometer-0\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " pod="openstack/ceilometer-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.162292 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-dns-svc\") pod \"dnsmasq-dns-757b4f8459-5ww45\" (UID: \"3c81155e-9881-4ed2-bcec-7035aed80588\") " pod="openstack/dnsmasq-dns-757b4f8459-5ww45" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.163340 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-dns-svc\") pod \"dnsmasq-dns-757b4f8459-5ww45\" (UID: \"3c81155e-9881-4ed2-bcec-7035aed80588\") " pod="openstack/dnsmasq-dns-757b4f8459-5ww45" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.166065 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-5ww45\" (UID: \"3c81155e-9881-4ed2-bcec-7035aed80588\") " pod="openstack/dnsmasq-dns-757b4f8459-5ww45" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.166209 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7e894c9-38fb-4616-ab4b-68166a67b5b5-log-httpd\") pod \"ceilometer-0\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " pod="openstack/ceilometer-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.166790 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-config\") pod \"dnsmasq-dns-757b4f8459-5ww45\" (UID: \"3c81155e-9881-4ed2-bcec-7035aed80588\") " pod="openstack/dnsmasq-dns-757b4f8459-5ww45" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.167365 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-5ww45\" (UID: \"3c81155e-9881-4ed2-bcec-7035aed80588\") " pod="openstack/dnsmasq-dns-757b4f8459-5ww45" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.167412 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7e894c9-38fb-4616-ab4b-68166a67b5b5-run-httpd\") pod \"ceilometer-0\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " pod="openstack/ceilometer-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.168089 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-5ww45\" (UID: \"3c81155e-9881-4ed2-bcec-7035aed80588\") " pod="openstack/dnsmasq-dns-757b4f8459-5ww45" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.183950 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " pod="openstack/ceilometer-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.195300 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-scripts\") pod \"ceilometer-0\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " pod="openstack/ceilometer-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.195906 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " pod="openstack/ceilometer-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.209260 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " pod="openstack/ceilometer-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.223577 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzs5j\" (UniqueName: \"kubernetes.io/projected/3c81155e-9881-4ed2-bcec-7035aed80588-kube-api-access-gzs5j\") pod \"dnsmasq-dns-757b4f8459-5ww45\" (UID: \"3c81155e-9881-4ed2-bcec-7035aed80588\") " pod="openstack/dnsmasq-dns-757b4f8459-5ww45" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.225713 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-config-data\") pod \"ceilometer-0\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " pod="openstack/ceilometer-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.226307 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wk6v7\" (UniqueName: \"kubernetes.io/projected/d7e894c9-38fb-4616-ab4b-68166a67b5b5-kube-api-access-wk6v7\") pod \"ceilometer-0\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " pod="openstack/ceilometer-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.250696 4838 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/nova-metadata-0"] Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.252281 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.261374 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.264869 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.265135 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.286294 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.294234 4838 scope.go:117] "RemoveContainer" containerID="38f8ef9d6c64c283d622e3d34cbab7d94688ae6d7ba92a91f84e63dc4f5052aa" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.363053 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-55db57f78d-mdj8d"] Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.364690 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-55db57f78d-mdj8d" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.374285 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.374929 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-fw9th" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.381278 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.383289 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.383550 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"85cf2dd0-e8d9-4eba-a280-dba74e08cb91\") " pod="openstack/nova-metadata-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.383598 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-logs\") pod \"nova-metadata-0\" (UID: \"85cf2dd0-e8d9-4eba-a280-dba74e08cb91\") " pod="openstack/nova-metadata-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.383668 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-config-data\") pod \"nova-metadata-0\" (UID: \"85cf2dd0-e8d9-4eba-a280-dba74e08cb91\") " pod="openstack/nova-metadata-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.383716 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpftv\" (UniqueName: \"kubernetes.io/projected/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-kube-api-access-fpftv\") pod \"nova-metadata-0\" (UID: \"85cf2dd0-e8d9-4eba-a280-dba74e08cb91\") " 
pod="openstack/nova-metadata-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.383773 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"85cf2dd0-e8d9-4eba-a280-dba74e08cb91\") " pod="openstack/nova-metadata-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.389212 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-55db57f78d-mdj8d"] Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.446008 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.485180 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d6708b7-91c5-4090-8e1f-60061ca37055-combined-ca-bundle\") pod \"neutron-55db57f78d-mdj8d\" (UID: \"7d6708b7-91c5-4090-8e1f-60061ca37055\") " pod="openstack/neutron-55db57f78d-mdj8d" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.485225 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"85cf2dd0-e8d9-4eba-a280-dba74e08cb91\") " pod="openstack/nova-metadata-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.485281 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d6708b7-91c5-4090-8e1f-60061ca37055-ovndb-tls-certs\") pod \"neutron-55db57f78d-mdj8d\" (UID: \"7d6708b7-91c5-4090-8e1f-60061ca37055\") " pod="openstack/neutron-55db57f78d-mdj8d" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.489847 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-logs\") pod \"nova-metadata-0\" (UID: \"85cf2dd0-e8d9-4eba-a280-dba74e08cb91\") " pod="openstack/nova-metadata-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.489989 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-config-data\") pod \"nova-metadata-0\" (UID: \"85cf2dd0-e8d9-4eba-a280-dba74e08cb91\") " pod="openstack/nova-metadata-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.490049 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpftv\" (UniqueName: \"kubernetes.io/projected/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-kube-api-access-fpftv\") pod \"nova-metadata-0\" (UID: \"85cf2dd0-e8d9-4eba-a280-dba74e08cb91\") " pod="openstack/nova-metadata-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.490073 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqrsk\" (UniqueName: \"kubernetes.io/projected/7d6708b7-91c5-4090-8e1f-60061ca37055-kube-api-access-fqrsk\") pod \"neutron-55db57f78d-mdj8d\" (UID: \"7d6708b7-91c5-4090-8e1f-60061ca37055\") " pod="openstack/neutron-55db57f78d-mdj8d" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.490099 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"config\" (UniqueName: \"kubernetes.io/secret/7d6708b7-91c5-4090-8e1f-60061ca37055-config\") pod \"neutron-55db57f78d-mdj8d\" (UID: \"7d6708b7-91c5-4090-8e1f-60061ca37055\") " pod="openstack/neutron-55db57f78d-mdj8d" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.490201 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/7d6708b7-91c5-4090-8e1f-60061ca37055-httpd-config\") pod \"neutron-55db57f78d-mdj8d\" (UID: \"7d6708b7-91c5-4090-8e1f-60061ca37055\") " pod="openstack/neutron-55db57f78d-mdj8d" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.490223 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"85cf2dd0-e8d9-4eba-a280-dba74e08cb91\") " pod="openstack/nova-metadata-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.493118 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-logs\") pod \"nova-metadata-0\" (UID: \"85cf2dd0-e8d9-4eba-a280-dba74e08cb91\") " pod="openstack/nova-metadata-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.496133 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"85cf2dd0-e8d9-4eba-a280-dba74e08cb91\") " pod="openstack/nova-metadata-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.496514 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"85cf2dd0-e8d9-4eba-a280-dba74e08cb91\") " pod="openstack/nova-metadata-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.507212 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-config-data\") pod \"nova-metadata-0\" (UID: \"85cf2dd0-e8d9-4eba-a280-dba74e08cb91\") " pod="openstack/nova-metadata-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.507476 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-5ww45" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.524367 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpftv\" (UniqueName: \"kubernetes.io/projected/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-kube-api-access-fpftv\") pod \"nova-metadata-0\" (UID: \"85cf2dd0-e8d9-4eba-a280-dba74e08cb91\") " pod="openstack/nova-metadata-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.580000 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.592641 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d6708b7-91c5-4090-8e1f-60061ca37055-combined-ca-bundle\") pod \"neutron-55db57f78d-mdj8d\" (UID: \"7d6708b7-91c5-4090-8e1f-60061ca37055\") " pod="openstack/neutron-55db57f78d-mdj8d" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.592717 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d6708b7-91c5-4090-8e1f-60061ca37055-ovndb-tls-certs\") pod \"neutron-55db57f78d-mdj8d\" (UID: \"7d6708b7-91c5-4090-8e1f-60061ca37055\") " pod="openstack/neutron-55db57f78d-mdj8d" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.592828 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqrsk\" (UniqueName: \"kubernetes.io/projected/7d6708b7-91c5-4090-8e1f-60061ca37055-kube-api-access-fqrsk\") pod \"neutron-55db57f78d-mdj8d\" (UID: \"7d6708b7-91c5-4090-8e1f-60061ca37055\") " pod="openstack/neutron-55db57f78d-mdj8d" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.592851 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/7d6708b7-91c5-4090-8e1f-60061ca37055-config\") pod \"neutron-55db57f78d-mdj8d\" (UID: \"7d6708b7-91c5-4090-8e1f-60061ca37055\") " pod="openstack/neutron-55db57f78d-mdj8d" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.592952 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/7d6708b7-91c5-4090-8e1f-60061ca37055-httpd-config\") pod \"neutron-55db57f78d-mdj8d\" (UID: \"7d6708b7-91c5-4090-8e1f-60061ca37055\") " pod="openstack/neutron-55db57f78d-mdj8d" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.599333 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d6708b7-91c5-4090-8e1f-60061ca37055-ovndb-tls-certs\") pod \"neutron-55db57f78d-mdj8d\" (UID: \"7d6708b7-91c5-4090-8e1f-60061ca37055\") " pod="openstack/neutron-55db57f78d-mdj8d" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.601197 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/7d6708b7-91c5-4090-8e1f-60061ca37055-config\") pod \"neutron-55db57f78d-mdj8d\" (UID: \"7d6708b7-91c5-4090-8e1f-60061ca37055\") " pod="openstack/neutron-55db57f78d-mdj8d" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.601296 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d6708b7-91c5-4090-8e1f-60061ca37055-combined-ca-bundle\") pod \"neutron-55db57f78d-mdj8d\" (UID: \"7d6708b7-91c5-4090-8e1f-60061ca37055\") " pod="openstack/neutron-55db57f78d-mdj8d" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.601851 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/7d6708b7-91c5-4090-8e1f-60061ca37055-httpd-config\") pod \"neutron-55db57f78d-mdj8d\" (UID: \"7d6708b7-91c5-4090-8e1f-60061ca37055\") " pod="openstack/neutron-55db57f78d-mdj8d" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.629370 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-fqrsk\" (UniqueName: \"kubernetes.io/projected/7d6708b7-91c5-4090-8e1f-60061ca37055-kube-api-access-fqrsk\") pod \"neutron-55db57f78d-mdj8d\" (UID: \"7d6708b7-91c5-4090-8e1f-60061ca37055\") " pod="openstack/neutron-55db57f78d-mdj8d" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.899516 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a451fb50-63dc-4dcc-8cf0-37e7f3d99888","Type":"ContainerStarted","Data":"f591c7a46fec62a9dde0a5336c90e0720e7d920b6ba91ca5a782075bd4f9bf0c"} Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.902646 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-55db57f78d-mdj8d" Feb 02 11:19:31 crc kubenswrapper[4838]: I0202 11:19:31.925557 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"98e7b3ca-1138-4df5-a2c4-f1b03b4352bb","Type":"ContainerStarted","Data":"cd558cabeb49e935aeef77b62be6108ba9f571895321cc1a1c11325742463853"} Feb 02 11:19:32 crc kubenswrapper[4838]: I0202 11:19:32.227415 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:19:32 crc kubenswrapper[4838]: I0202 11:19:32.347488 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-5ww45"] Feb 02 11:19:32 crc kubenswrapper[4838]: I0202 11:19:32.461875 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 11:19:32 crc kubenswrapper[4838]: I0202 11:19:32.559264 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57bf0b31-2ff8-44ad-a509-be8868430dd2" path="/var/lib/kubelet/pods/57bf0b31-2ff8-44ad-a509-be8868430dd2/volumes" Feb 02 11:19:32 crc kubenswrapper[4838]: I0202 11:19:32.559911 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bc80d3e-05b1-4bfa-9c94-3d7c162420aa" path="/var/lib/kubelet/pods/7bc80d3e-05b1-4bfa-9c94-3d7c162420aa/volumes" Feb 02 11:19:32 crc kubenswrapper[4838]: I0202 11:19:32.823779 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-55db57f78d-mdj8d"] Feb 02 11:19:32 crc kubenswrapper[4838]: I0202 11:19:32.947820 4838 generic.go:334] "Generic (PLEG): container finished" podID="81309430-b454-4c48-95da-f3dbed0ad937" containerID="a81d6b991b50a8abcd15e12c426be988a9c2fb0dba0aa6c3f8ee47116aedb1cd" exitCode=0 Feb 02 11:19:32 crc kubenswrapper[4838]: I0202 11:19:32.947889 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cxl9q" event={"ID":"81309430-b454-4c48-95da-f3dbed0ad937","Type":"ContainerDied","Data":"a81d6b991b50a8abcd15e12c426be988a9c2fb0dba0aa6c3f8ee47116aedb1cd"} Feb 02 11:19:32 crc kubenswrapper[4838]: I0202 11:19:32.955312 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7e894c9-38fb-4616-ab4b-68166a67b5b5","Type":"ContainerStarted","Data":"2a2e09b49c27998bee521c2467a0f31340095d895a550024a9615bb2365b1e0b"} Feb 02 11:19:32 crc kubenswrapper[4838]: I0202 11:19:32.958124 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"98e7b3ca-1138-4df5-a2c4-f1b03b4352bb","Type":"ContainerStarted","Data":"7e3f317c1e241798f96f230f8ba9eba56e2746b06de7e68b8bfd755058c0e297"} Feb 02 11:19:32 crc kubenswrapper[4838]: I0202 11:19:32.959872 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-5ww45" 
event={"ID":"3c81155e-9881-4ed2-bcec-7035aed80588","Type":"ContainerStarted","Data":"298b4e08c6354f1cbc9450ad53d80971787adf9db2b8890bb064087d492702ff"} Feb 02 11:19:32 crc kubenswrapper[4838]: I0202 11:19:32.961154 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"85cf2dd0-e8d9-4eba-a280-dba74e08cb91","Type":"ContainerStarted","Data":"23f1efff56dd44dfc964a04a7299588b1c2e05f0c4d0c344f71b570b5f70aaec"} Feb 02 11:19:32 crc kubenswrapper[4838]: I0202 11:19:32.973274 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a451fb50-63dc-4dcc-8cf0-37e7f3d99888","Type":"ContainerStarted","Data":"b883a3c56c53daf2c6a2397e34f188e4648e6dc11ae45a796c90005b16c39e6f"} Feb 02 11:19:32 crc kubenswrapper[4838]: I0202 11:19:32.975609 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55db57f78d-mdj8d" event={"ID":"7d6708b7-91c5-4090-8e1f-60061ca37055","Type":"ContainerStarted","Data":"604229b14684efec320d8467e0b7f3713182d277fc2c21b3e580ad79ee78fa09"} Feb 02 11:19:33 crc kubenswrapper[4838]: I0202 11:19:33.000849 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.343365951 podStartE2EDuration="4.000828981s" podCreationTimestamp="2026-02-02 11:19:29 +0000 UTC" firstStartedPulling="2026-02-02 11:19:30.975964496 +0000 UTC m=+1565.313065524" lastFinishedPulling="2026-02-02 11:19:31.633427526 +0000 UTC m=+1565.970528554" observedRunningTime="2026-02-02 11:19:32.989875729 +0000 UTC m=+1567.326976787" watchObservedRunningTime="2026-02-02 11:19:33.000828981 +0000 UTC m=+1567.337930009" Feb 02 11:19:33 crc kubenswrapper[4838]: I0202 11:19:33.714363 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-595df946b7-5b7qm"] Feb 02 11:19:33 crc kubenswrapper[4838]: I0202 11:19:33.718639 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-595df946b7-5b7qm" Feb 02 11:19:33 crc kubenswrapper[4838]: I0202 11:19:33.724139 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Feb 02 11:19:33 crc kubenswrapper[4838]: I0202 11:19:33.724335 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Feb 02 11:19:33 crc kubenswrapper[4838]: I0202 11:19:33.729378 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-595df946b7-5b7qm"] Feb 02 11:19:33 crc kubenswrapper[4838]: I0202 11:19:33.861607 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de9fd7ce-936c-460d-b33b-e9b089a7d495-combined-ca-bundle\") pod \"neutron-595df946b7-5b7qm\" (UID: \"de9fd7ce-936c-460d-b33b-e9b089a7d495\") " pod="openstack/neutron-595df946b7-5b7qm" Feb 02 11:19:33 crc kubenswrapper[4838]: I0202 11:19:33.861961 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/de9fd7ce-936c-460d-b33b-e9b089a7d495-config\") pod \"neutron-595df946b7-5b7qm\" (UID: \"de9fd7ce-936c-460d-b33b-e9b089a7d495\") " pod="openstack/neutron-595df946b7-5b7qm" Feb 02 11:19:33 crc kubenswrapper[4838]: I0202 11:19:33.862000 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jmql\" (UniqueName: \"kubernetes.io/projected/de9fd7ce-936c-460d-b33b-e9b089a7d495-kube-api-access-4jmql\") pod \"neutron-595df946b7-5b7qm\" (UID: \"de9fd7ce-936c-460d-b33b-e9b089a7d495\") " pod="openstack/neutron-595df946b7-5b7qm" Feb 02 11:19:33 crc kubenswrapper[4838]: I0202 11:19:33.862055 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9fd7ce-936c-460d-b33b-e9b089a7d495-ovndb-tls-certs\") pod \"neutron-595df946b7-5b7qm\" (UID: \"de9fd7ce-936c-460d-b33b-e9b089a7d495\") " pod="openstack/neutron-595df946b7-5b7qm" Feb 02 11:19:33 crc kubenswrapper[4838]: I0202 11:19:33.862117 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/de9fd7ce-936c-460d-b33b-e9b089a7d495-httpd-config\") pod \"neutron-595df946b7-5b7qm\" (UID: \"de9fd7ce-936c-460d-b33b-e9b089a7d495\") " pod="openstack/neutron-595df946b7-5b7qm" Feb 02 11:19:33 crc kubenswrapper[4838]: I0202 11:19:33.862140 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9fd7ce-936c-460d-b33b-e9b089a7d495-internal-tls-certs\") pod \"neutron-595df946b7-5b7qm\" (UID: \"de9fd7ce-936c-460d-b33b-e9b089a7d495\") " pod="openstack/neutron-595df946b7-5b7qm" Feb 02 11:19:33 crc kubenswrapper[4838]: I0202 11:19:33.862218 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9fd7ce-936c-460d-b33b-e9b089a7d495-public-tls-certs\") pod \"neutron-595df946b7-5b7qm\" (UID: \"de9fd7ce-936c-460d-b33b-e9b089a7d495\") " pod="openstack/neutron-595df946b7-5b7qm" Feb 02 11:19:33 crc kubenswrapper[4838]: I0202 11:19:33.963639 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jmql\" (UniqueName: 
\"kubernetes.io/projected/de9fd7ce-936c-460d-b33b-e9b089a7d495-kube-api-access-4jmql\") pod \"neutron-595df946b7-5b7qm\" (UID: \"de9fd7ce-936c-460d-b33b-e9b089a7d495\") " pod="openstack/neutron-595df946b7-5b7qm" Feb 02 11:19:33 crc kubenswrapper[4838]: I0202 11:19:33.963704 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9fd7ce-936c-460d-b33b-e9b089a7d495-ovndb-tls-certs\") pod \"neutron-595df946b7-5b7qm\" (UID: \"de9fd7ce-936c-460d-b33b-e9b089a7d495\") " pod="openstack/neutron-595df946b7-5b7qm" Feb 02 11:19:33 crc kubenswrapper[4838]: I0202 11:19:33.963763 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/de9fd7ce-936c-460d-b33b-e9b089a7d495-httpd-config\") pod \"neutron-595df946b7-5b7qm\" (UID: \"de9fd7ce-936c-460d-b33b-e9b089a7d495\") " pod="openstack/neutron-595df946b7-5b7qm" Feb 02 11:19:33 crc kubenswrapper[4838]: I0202 11:19:33.963784 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9fd7ce-936c-460d-b33b-e9b089a7d495-internal-tls-certs\") pod \"neutron-595df946b7-5b7qm\" (UID: \"de9fd7ce-936c-460d-b33b-e9b089a7d495\") " pod="openstack/neutron-595df946b7-5b7qm" Feb 02 11:19:33 crc kubenswrapper[4838]: I0202 11:19:33.963848 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9fd7ce-936c-460d-b33b-e9b089a7d495-public-tls-certs\") pod \"neutron-595df946b7-5b7qm\" (UID: \"de9fd7ce-936c-460d-b33b-e9b089a7d495\") " pod="openstack/neutron-595df946b7-5b7qm" Feb 02 11:19:33 crc kubenswrapper[4838]: I0202 11:19:33.963918 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de9fd7ce-936c-460d-b33b-e9b089a7d495-combined-ca-bundle\") pod \"neutron-595df946b7-5b7qm\" (UID: \"de9fd7ce-936c-460d-b33b-e9b089a7d495\") " pod="openstack/neutron-595df946b7-5b7qm" Feb 02 11:19:33 crc kubenswrapper[4838]: I0202 11:19:33.963949 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/de9fd7ce-936c-460d-b33b-e9b089a7d495-config\") pod \"neutron-595df946b7-5b7qm\" (UID: \"de9fd7ce-936c-460d-b33b-e9b089a7d495\") " pod="openstack/neutron-595df946b7-5b7qm" Feb 02 11:19:33 crc kubenswrapper[4838]: I0202 11:19:33.970253 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/de9fd7ce-936c-460d-b33b-e9b089a7d495-config\") pod \"neutron-595df946b7-5b7qm\" (UID: \"de9fd7ce-936c-460d-b33b-e9b089a7d495\") " pod="openstack/neutron-595df946b7-5b7qm" Feb 02 11:19:33 crc kubenswrapper[4838]: I0202 11:19:33.972800 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9fd7ce-936c-460d-b33b-e9b089a7d495-ovndb-tls-certs\") pod \"neutron-595df946b7-5b7qm\" (UID: \"de9fd7ce-936c-460d-b33b-e9b089a7d495\") " pod="openstack/neutron-595df946b7-5b7qm" Feb 02 11:19:33 crc kubenswrapper[4838]: I0202 11:19:33.973387 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/de9fd7ce-936c-460d-b33b-e9b089a7d495-httpd-config\") pod \"neutron-595df946b7-5b7qm\" (UID: \"de9fd7ce-936c-460d-b33b-e9b089a7d495\") " 
pod="openstack/neutron-595df946b7-5b7qm" Feb 02 11:19:33 crc kubenswrapper[4838]: I0202 11:19:33.992172 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jmql\" (UniqueName: \"kubernetes.io/projected/de9fd7ce-936c-460d-b33b-e9b089a7d495-kube-api-access-4jmql\") pod \"neutron-595df946b7-5b7qm\" (UID: \"de9fd7ce-936c-460d-b33b-e9b089a7d495\") " pod="openstack/neutron-595df946b7-5b7qm" Feb 02 11:19:33 crc kubenswrapper[4838]: I0202 11:19:33.995801 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de9fd7ce-936c-460d-b33b-e9b089a7d495-combined-ca-bundle\") pod \"neutron-595df946b7-5b7qm\" (UID: \"de9fd7ce-936c-460d-b33b-e9b089a7d495\") " pod="openstack/neutron-595df946b7-5b7qm" Feb 02 11:19:34 crc kubenswrapper[4838]: I0202 11:19:34.001448 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9fd7ce-936c-460d-b33b-e9b089a7d495-internal-tls-certs\") pod \"neutron-595df946b7-5b7qm\" (UID: \"de9fd7ce-936c-460d-b33b-e9b089a7d495\") " pod="openstack/neutron-595df946b7-5b7qm" Feb 02 11:19:34 crc kubenswrapper[4838]: I0202 11:19:34.002465 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9fd7ce-936c-460d-b33b-e9b089a7d495-public-tls-certs\") pod \"neutron-595df946b7-5b7qm\" (UID: \"de9fd7ce-936c-460d-b33b-e9b089a7d495\") " pod="openstack/neutron-595df946b7-5b7qm" Feb 02 11:19:34 crc kubenswrapper[4838]: I0202 11:19:34.026819 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55db57f78d-mdj8d" event={"ID":"7d6708b7-91c5-4090-8e1f-60061ca37055","Type":"ContainerStarted","Data":"82660f60e665882b502595c6b18c8dfda06ee7965108b4b37daf07039c16abcd"} Feb 02 11:19:34 crc kubenswrapper[4838]: I0202 11:19:34.037177 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-595df946b7-5b7qm" Feb 02 11:19:34 crc kubenswrapper[4838]: I0202 11:19:34.048755 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"98e7b3ca-1138-4df5-a2c4-f1b03b4352bb","Type":"ContainerStarted","Data":"893e42201bfbe73cbc9cfb9c341cca8133d393c5a8b0b83423989ee93225e855"} Feb 02 11:19:34 crc kubenswrapper[4838]: I0202 11:19:34.065953 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-5ww45" event={"ID":"3c81155e-9881-4ed2-bcec-7035aed80588","Type":"ContainerStarted","Data":"a53f9634b4ee823444d4cca2daffa6f3d61b1f6290df168d42f2c024cb8c37d1"} Feb 02 11:19:34 crc kubenswrapper[4838]: I0202 11:19:34.078793 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"85cf2dd0-e8d9-4eba-a280-dba74e08cb91","Type":"ContainerStarted","Data":"ae017bc95b2af3f6e54879b85b24b475f068b97103df3a18f1ae0841a7fb1d43"} Feb 02 11:19:34 crc kubenswrapper[4838]: I0202 11:19:34.821235 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-595df946b7-5b7qm"] Feb 02 11:19:35 crc kubenswrapper[4838]: I0202 11:19:35.088888 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7e894c9-38fb-4616-ab4b-68166a67b5b5","Type":"ContainerStarted","Data":"f4e24cc1537c5a70eedcada4b7f4e5cd77efa76aeb73ab7c15fbda99f6fe46b4"} Feb 02 11:19:35 crc kubenswrapper[4838]: I0202 11:19:35.090677 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-595df946b7-5b7qm" event={"ID":"de9fd7ce-936c-460d-b33b-e9b089a7d495","Type":"ContainerStarted","Data":"e454686da63119b5f577d305b83c0137dfe49f0ef01c01e386b16a768fbe3bec"} Feb 02 11:19:35 crc kubenswrapper[4838]: I0202 11:19:35.092655 4838 generic.go:334] "Generic (PLEG): container finished" podID="3c81155e-9881-4ed2-bcec-7035aed80588" containerID="a53f9634b4ee823444d4cca2daffa6f3d61b1f6290df168d42f2c024cb8c37d1" exitCode=0 Feb 02 11:19:35 crc kubenswrapper[4838]: I0202 11:19:35.092909 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-5ww45" event={"ID":"3c81155e-9881-4ed2-bcec-7035aed80588","Type":"ContainerDied","Data":"a53f9634b4ee823444d4cca2daffa6f3d61b1f6290df168d42f2c024cb8c37d1"} Feb 02 11:19:35 crc kubenswrapper[4838]: I0202 11:19:35.095104 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"85cf2dd0-e8d9-4eba-a280-dba74e08cb91","Type":"ContainerStarted","Data":"625e818ba91644b17531ae65cf32edd2bc9cd2dec981b20e52fc83131711f658"} Feb 02 11:19:35 crc kubenswrapper[4838]: I0202 11:19:35.098674 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55db57f78d-mdj8d" event={"ID":"7d6708b7-91c5-4090-8e1f-60061ca37055","Type":"ContainerStarted","Data":"4526e7d2d33a8a9946a0b12ad48b9cb8e7dc9dd25c6ee4956284f9466b4bfd04"} Feb 02 11:19:35 crc kubenswrapper[4838]: I0202 11:19:35.098828 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-55db57f78d-mdj8d" Feb 02 11:19:35 crc kubenswrapper[4838]: I0202 11:19:35.105668 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cxl9q" event={"ID":"81309430-b454-4c48-95da-f3dbed0ad937","Type":"ContainerStarted","Data":"e2f33d2573c5cb2fdef9776a6bac12f17737182effda706f908a89d82eca1ed3"} Feb 02 11:19:35 crc kubenswrapper[4838]: I0202 11:19:35.151094 4838 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/nova-metadata-0" podStartSLOduration=5.151070255 podStartE2EDuration="5.151070255s" podCreationTimestamp="2026-02-02 11:19:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:19:35.147592222 +0000 UTC m=+1569.484693250" watchObservedRunningTime="2026-02-02 11:19:35.151070255 +0000 UTC m=+1569.488171283" Feb 02 11:19:35 crc kubenswrapper[4838]: I0202 11:19:35.179866 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=5.179844011 podStartE2EDuration="5.179844011s" podCreationTimestamp="2026-02-02 11:19:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:19:35.170087381 +0000 UTC m=+1569.507188409" watchObservedRunningTime="2026-02-02 11:19:35.179844011 +0000 UTC m=+1569.516945029" Feb 02 11:19:35 crc kubenswrapper[4838]: I0202 11:19:35.226279 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-cxl9q" podStartSLOduration=3.40415214 podStartE2EDuration="7.226250587s" podCreationTimestamp="2026-02-02 11:19:28 +0000 UTC" firstStartedPulling="2026-02-02 11:19:30.857655316 +0000 UTC m=+1565.194756344" lastFinishedPulling="2026-02-02 11:19:34.679753763 +0000 UTC m=+1569.016854791" observedRunningTime="2026-02-02 11:19:35.2120898 +0000 UTC m=+1569.549190828" watchObservedRunningTime="2026-02-02 11:19:35.226250587 +0000 UTC m=+1569.563351625" Feb 02 11:19:35 crc kubenswrapper[4838]: I0202 11:19:35.251016 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-55db57f78d-mdj8d" podStartSLOduration=4.250976185 podStartE2EDuration="4.250976185s" podCreationTimestamp="2026-02-02 11:19:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:19:35.239362216 +0000 UTC m=+1569.576463254" watchObservedRunningTime="2026-02-02 11:19:35.250976185 +0000 UTC m=+1569.588077213" Feb 02 11:19:35 crc kubenswrapper[4838]: I0202 11:19:35.284829 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Feb 02 11:19:36 crc kubenswrapper[4838]: I0202 11:19:36.123771 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-595df946b7-5b7qm" event={"ID":"de9fd7ce-936c-460d-b33b-e9b089a7d495","Type":"ContainerStarted","Data":"486cd8856b23362979fdf054a93fbff0f3e5020aba1973c9e400b32cbf669ebe"} Feb 02 11:19:36 crc kubenswrapper[4838]: I0202 11:19:36.124201 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-595df946b7-5b7qm" event={"ID":"de9fd7ce-936c-460d-b33b-e9b089a7d495","Type":"ContainerStarted","Data":"40db2d4d351a1033969786e61efac0f968c77aedba1d5e2db8b9c217b61fd26c"} Feb 02 11:19:36 crc kubenswrapper[4838]: I0202 11:19:36.124255 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-595df946b7-5b7qm" Feb 02 11:19:36 crc kubenswrapper[4838]: I0202 11:19:36.128185 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-5ww45" event={"ID":"3c81155e-9881-4ed2-bcec-7035aed80588","Type":"ContainerStarted","Data":"eb7c61580160aafc830c2316da7aa59476cc37a6b075e760c58e1eb752864b0f"} Feb 02 11:19:36 crc kubenswrapper[4838]: I0202 11:19:36.181800 4838 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-595df946b7-5b7qm" podStartSLOduration=3.181777583 podStartE2EDuration="3.181777583s" podCreationTimestamp="2026-02-02 11:19:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:19:36.155506693 +0000 UTC m=+1570.492607741" watchObservedRunningTime="2026-02-02 11:19:36.181777583 +0000 UTC m=+1570.518878611" Feb 02 11:19:36 crc kubenswrapper[4838]: I0202 11:19:36.204129 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-757b4f8459-5ww45" podStartSLOduration=6.204109527 podStartE2EDuration="6.204109527s" podCreationTimestamp="2026-02-02 11:19:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:19:36.174589241 +0000 UTC m=+1570.511690269" watchObservedRunningTime="2026-02-02 11:19:36.204109527 +0000 UTC m=+1570.541210555" Feb 02 11:19:36 crc kubenswrapper[4838]: I0202 11:19:36.530211 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-757b4f8459-5ww45" Feb 02 11:19:36 crc kubenswrapper[4838]: I0202 11:19:36.580678 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 02 11:19:36 crc kubenswrapper[4838]: I0202 11:19:36.580733 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 02 11:19:37 crc kubenswrapper[4838]: I0202 11:19:37.138911 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7e894c9-38fb-4616-ab4b-68166a67b5b5","Type":"ContainerStarted","Data":"bd4490d7bdb69d9d04b638bd8d5d83d860065b5e34f6279b5819dad5e39ce15f"} Feb 02 11:19:38 crc kubenswrapper[4838]: I0202 11:19:38.499132 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-cxl9q" Feb 02 11:19:38 crc kubenswrapper[4838]: I0202 11:19:38.500314 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-cxl9q" Feb 02 11:19:38 crc kubenswrapper[4838]: I0202 11:19:38.549238 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-cxl9q" Feb 02 11:19:39 crc kubenswrapper[4838]: I0202 11:19:39.164514 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7e894c9-38fb-4616-ab4b-68166a67b5b5","Type":"ContainerStarted","Data":"963539a18de0b512aefb2312077654994bf416d1972194f00551666db57fe8c3"} Feb 02 11:19:39 crc kubenswrapper[4838]: I0202 11:19:39.221573 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-cxl9q" Feb 02 11:19:39 crc kubenswrapper[4838]: I0202 11:19:39.280420 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cxl9q"] Feb 02 11:19:39 crc kubenswrapper[4838]: E0202 11:19:39.829933 4838 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/4ad5c69c735469ae066114ff1ce83d507717f5860993b57189a67b47d52fdf1c/diff" to get inode usage: stat /var/lib/containers/storage/overlay/4ad5c69c735469ae066114ff1ce83d507717f5860993b57189a67b47d52fdf1c/diff: no such file or directory, extraDiskErr: could not stat 
"/var/log/pods/openstack_ceilometer-0_7bc80d3e-05b1-4bfa-9c94-3d7c162420aa/ceilometer-central-agent/0.log" to get inode usage: stat /var/log/pods/openstack_ceilometer-0_7bc80d3e-05b1-4bfa-9c94-3d7c162420aa/ceilometer-central-agent/0.log: no such file or directory Feb 02 11:19:40 crc kubenswrapper[4838]: I0202 11:19:40.284124 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Feb 02 11:19:40 crc kubenswrapper[4838]: I0202 11:19:40.321972 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Feb 02 11:19:40 crc kubenswrapper[4838]: I0202 11:19:40.519945 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 02 11:19:40 crc kubenswrapper[4838]: I0202 11:19:40.520006 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 02 11:19:41 crc kubenswrapper[4838]: I0202 11:19:41.181254 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-cxl9q" podUID="81309430-b454-4c48-95da-f3dbed0ad937" containerName="registry-server" containerID="cri-o://e2f33d2573c5cb2fdef9776a6bac12f17737182effda706f908a89d82eca1ed3" gracePeriod=2 Feb 02 11:19:41 crc kubenswrapper[4838]: I0202 11:19:41.216234 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Feb 02 11:19:41 crc kubenswrapper[4838]: E0202 11:19:41.308214 4838 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/96cc89f83486f93a0b6e3c340f5b90c7a1b7758288051a6cdbf47e41362dd66a/diff" to get inode usage: stat /var/lib/containers/storage/overlay/96cc89f83486f93a0b6e3c340f5b90c7a1b7758288051a6cdbf47e41362dd66a/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack_ceilometer-0_7bc80d3e-05b1-4bfa-9c94-3d7c162420aa/ceilometer-notification-agent/0.log" to get inode usage: stat /var/log/pods/openstack_ceilometer-0_7bc80d3e-05b1-4bfa-9c94-3d7c162420aa/ceilometer-notification-agent/0.log: no such file or directory Feb 02 11:19:41 crc kubenswrapper[4838]: I0202 11:19:41.509804 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-757b4f8459-5ww45" Feb 02 11:19:41 crc kubenswrapper[4838]: I0202 11:19:41.569327 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78d445889f-948j8"] Feb 02 11:19:41 crc kubenswrapper[4838]: I0202 11:19:41.573821 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-78d445889f-948j8" podUID="232b1b0a-b35d-4834-a4bb-3a1ed7de6f71" containerName="dnsmasq-dns" containerID="cri-o://010eaa33f02c4787fa4116a23dc7014dbc08122d9a29861bd4c02497dc2f3fa3" gracePeriod=10 Feb 02 11:19:41 crc kubenswrapper[4838]: I0202 11:19:41.584089 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 02 11:19:41 crc kubenswrapper[4838]: I0202 11:19:41.584128 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 02 11:19:41 crc kubenswrapper[4838]: I0202 11:19:41.593281 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="98e7b3ca-1138-4df5-a2c4-f1b03b4352bb" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.205:8774/\": context deadline 
Feb 02 11:19:41 crc kubenswrapper[4838]: I0202 11:19:41.593598 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="98e7b3ca-1138-4df5-a2c4-f1b03b4352bb" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.205:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.206479 4838 generic.go:334] "Generic (PLEG): container finished" podID="232b1b0a-b35d-4834-a4bb-3a1ed7de6f71" containerID="010eaa33f02c4787fa4116a23dc7014dbc08122d9a29861bd4c02497dc2f3fa3" exitCode=0
Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.206883 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78d445889f-948j8" event={"ID":"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71","Type":"ContainerDied","Data":"010eaa33f02c4787fa4116a23dc7014dbc08122d9a29861bd4c02497dc2f3fa3"}
Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.209050 4838 generic.go:334] "Generic (PLEG): container finished" podID="81309430-b454-4c48-95da-f3dbed0ad937" containerID="e2f33d2573c5cb2fdef9776a6bac12f17737182effda706f908a89d82eca1ed3" exitCode=0
Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.209096 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cxl9q" event={"ID":"81309430-b454-4c48-95da-f3dbed0ad937","Type":"ContainerDied","Data":"e2f33d2573c5cb2fdef9776a6bac12f17737182effda706f908a89d82eca1ed3"}
Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.428211 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78d445889f-948j8"
Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.512718 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-config\") pod \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\" (UID: \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\") "
Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.512857 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fdg8b\" (UniqueName: \"kubernetes.io/projected/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-kube-api-access-fdg8b\") pod \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\" (UID: \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\") "
Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.513015 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-ovsdbserver-nb\") pod \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\" (UID: \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\") "
Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.513091 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-dns-swift-storage-0\") pod \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\" (UID: \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\") "
Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.513200 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-dns-svc\") pod \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\" (UID: \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\") "
Feb 02 11:19:42 crc
kubenswrapper[4838]: I0202 11:19:42.513227 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-ovsdbserver-sb\") pod \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\" (UID: \"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71\") " Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.587271 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "232b1b0a-b35d-4834-a4bb-3a1ed7de6f71" (UID: "232b1b0a-b35d-4834-a4bb-3a1ed7de6f71"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.588802 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-kube-api-access-fdg8b" (OuterVolumeSpecName: "kube-api-access-fdg8b") pod "232b1b0a-b35d-4834-a4bb-3a1ed7de6f71" (UID: "232b1b0a-b35d-4834-a4bb-3a1ed7de6f71"). InnerVolumeSpecName "kube-api-access-fdg8b". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.617705 4838 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.617747 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fdg8b\" (UniqueName: \"kubernetes.io/projected/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-kube-api-access-fdg8b\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.625298 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-config" (OuterVolumeSpecName: "config") pod "232b1b0a-b35d-4834-a4bb-3a1ed7de6f71" (UID: "232b1b0a-b35d-4834-a4bb-3a1ed7de6f71"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.632303 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="85cf2dd0-e8d9-4eba-a280-dba74e08cb91" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.208:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.632400 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="85cf2dd0-e8d9-4eba-a280-dba74e08cb91" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.208:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.649378 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "232b1b0a-b35d-4834-a4bb-3a1ed7de6f71" (UID: "232b1b0a-b35d-4834-a4bb-3a1ed7de6f71"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.682566 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "232b1b0a-b35d-4834-a4bb-3a1ed7de6f71" (UID: "232b1b0a-b35d-4834-a4bb-3a1ed7de6f71"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.720367 4838 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.720404 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-config\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.720416 4838 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.720661 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "232b1b0a-b35d-4834-a4bb-3a1ed7de6f71" (UID: "232b1b0a-b35d-4834-a4bb-3a1ed7de6f71"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.822191 4838 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.829548 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cxl9q" Feb 02 11:19:42 crc kubenswrapper[4838]: E0202 11:19:42.892884 4838 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/d0dd95ebd1bc6674c7c464dc11b204a2931c4595093668027239f423019c4817/diff" to get inode usage: stat /var/lib/containers/storage/overlay/d0dd95ebd1bc6674c7c464dc11b204a2931c4595093668027239f423019c4817/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack_ceilometer-0_7bc80d3e-05b1-4bfa-9c94-3d7c162420aa/sg-core/0.log" to get inode usage: stat /var/log/pods/openstack_ceilometer-0_7bc80d3e-05b1-4bfa-9c94-3d7c162420aa/sg-core/0.log: no such file or directory Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.923192 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81309430-b454-4c48-95da-f3dbed0ad937-catalog-content\") pod \"81309430-b454-4c48-95da-f3dbed0ad937\" (UID: \"81309430-b454-4c48-95da-f3dbed0ad937\") " Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.923444 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81309430-b454-4c48-95da-f3dbed0ad937-utilities\") pod \"81309430-b454-4c48-95da-f3dbed0ad937\" (UID: \"81309430-b454-4c48-95da-f3dbed0ad937\") " Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.923583 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rjjtj\" (UniqueName: \"kubernetes.io/projected/81309430-b454-4c48-95da-f3dbed0ad937-kube-api-access-rjjtj\") pod \"81309430-b454-4c48-95da-f3dbed0ad937\" (UID: \"81309430-b454-4c48-95da-f3dbed0ad937\") " Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.925287 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81309430-b454-4c48-95da-f3dbed0ad937-utilities" (OuterVolumeSpecName: "utilities") pod "81309430-b454-4c48-95da-f3dbed0ad937" (UID: "81309430-b454-4c48-95da-f3dbed0ad937"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:19:42 crc kubenswrapper[4838]: I0202 11:19:42.928767 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81309430-b454-4c48-95da-f3dbed0ad937-kube-api-access-rjjtj" (OuterVolumeSpecName: "kube-api-access-rjjtj") pod "81309430-b454-4c48-95da-f3dbed0ad937" (UID: "81309430-b454-4c48-95da-f3dbed0ad937"). InnerVolumeSpecName "kube-api-access-rjjtj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:19:43 crc kubenswrapper[4838]: I0202 11:19:43.026000 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rjjtj\" (UniqueName: \"kubernetes.io/projected/81309430-b454-4c48-95da-f3dbed0ad937-kube-api-access-rjjtj\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:43 crc kubenswrapper[4838]: I0202 11:19:43.026271 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81309430-b454-4c48-95da-f3dbed0ad937-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:43 crc kubenswrapper[4838]: I0202 11:19:43.045524 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81309430-b454-4c48-95da-f3dbed0ad937-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "81309430-b454-4c48-95da-f3dbed0ad937" (UID: "81309430-b454-4c48-95da-f3dbed0ad937"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:19:43 crc kubenswrapper[4838]: I0202 11:19:43.128606 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81309430-b454-4c48-95da-f3dbed0ad937-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:43 crc kubenswrapper[4838]: I0202 11:19:43.220879 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7e894c9-38fb-4616-ab4b-68166a67b5b5","Type":"ContainerStarted","Data":"aae89210173f273a66f974d9f4ae0187e6c7992306e5fdefa17a0e221c99711c"} Feb 02 11:19:43 crc kubenswrapper[4838]: I0202 11:19:43.222262 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 11:19:43 crc kubenswrapper[4838]: I0202 11:19:43.226874 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78d445889f-948j8" Feb 02 11:19:43 crc kubenswrapper[4838]: I0202 11:19:43.226868 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78d445889f-948j8" event={"ID":"232b1b0a-b35d-4834-a4bb-3a1ed7de6f71","Type":"ContainerDied","Data":"adddfa0a3b712e2ee614e717c4aa9923456562be9403be804d30246b1c61579f"} Feb 02 11:19:43 crc kubenswrapper[4838]: I0202 11:19:43.227371 4838 scope.go:117] "RemoveContainer" containerID="010eaa33f02c4787fa4116a23dc7014dbc08122d9a29861bd4c02497dc2f3fa3" Feb 02 11:19:43 crc kubenswrapper[4838]: I0202 11:19:43.230983 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cxl9q" event={"ID":"81309430-b454-4c48-95da-f3dbed0ad937","Type":"ContainerDied","Data":"02d6c25e0687a436d3f24eebf3c27c8a62c3e9f0297686c2354ec69403eee35e"} Feb 02 11:19:43 crc kubenswrapper[4838]: I0202 11:19:43.231109 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cxl9q" Feb 02 11:19:43 crc kubenswrapper[4838]: I0202 11:19:43.256584 4838 scope.go:117] "RemoveContainer" containerID="dee22b825ffd2fda3a4ac6f30ab7dd95a42ada67172e2d8a5ff74021eaa4adaf" Feb 02 11:19:43 crc kubenswrapper[4838]: I0202 11:19:43.273486 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.670642597 podStartE2EDuration="13.273466562s" podCreationTimestamp="2026-02-02 11:19:30 +0000 UTC" firstStartedPulling="2026-02-02 11:19:32.281973806 +0000 UTC m=+1566.619074834" lastFinishedPulling="2026-02-02 11:19:41.884797761 +0000 UTC m=+1576.221898799" observedRunningTime="2026-02-02 11:19:43.271155251 +0000 UTC m=+1577.608256289" watchObservedRunningTime="2026-02-02 11:19:43.273466562 +0000 UTC m=+1577.610567590" Feb 02 11:19:43 crc kubenswrapper[4838]: I0202 11:19:43.305412 4838 scope.go:117] "RemoveContainer" containerID="e2f33d2573c5cb2fdef9776a6bac12f17737182effda706f908a89d82eca1ed3" Feb 02 11:19:43 crc kubenswrapper[4838]: I0202 11:19:43.332887 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78d445889f-948j8"] Feb 02 11:19:43 crc kubenswrapper[4838]: I0202 11:19:43.343489 4838 scope.go:117] "RemoveContainer" containerID="a81d6b991b50a8abcd15e12c426be988a9c2fb0dba0aa6c3f8ee47116aedb1cd" Feb 02 11:19:43 crc kubenswrapper[4838]: I0202 11:19:43.348785 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78d445889f-948j8"] Feb 02 11:19:43 crc kubenswrapper[4838]: I0202 11:19:43.363059 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cxl9q"] Feb 02 11:19:43 crc kubenswrapper[4838]: I0202 11:19:43.372943 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-cxl9q"] Feb 02 11:19:43 crc kubenswrapper[4838]: I0202 11:19:43.383565 4838 scope.go:117] "RemoveContainer" containerID="88feb5fd9b891083958fe7aaef1505b23186a81da36a3676fbe0c4b92c34f961" Feb 02 11:19:44 crc kubenswrapper[4838]: I0202 11:19:44.517459 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="232b1b0a-b35d-4834-a4bb-3a1ed7de6f71" path="/var/lib/kubelet/pods/232b1b0a-b35d-4834-a4bb-3a1ed7de6f71/volumes" Feb 02 11:19:44 crc kubenswrapper[4838]: I0202 11:19:44.518487 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81309430-b454-4c48-95da-f3dbed0ad937" path="/var/lib/kubelet/pods/81309430-b454-4c48-95da-f3dbed0ad937/volumes" Feb 02 11:19:45 crc kubenswrapper[4838]: I0202 11:19:45.243478 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 02 11:19:45 crc kubenswrapper[4838]: I0202 11:19:45.243759 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="98e7b3ca-1138-4df5-a2c4-f1b03b4352bb" containerName="nova-api-log" containerID="cri-o://7e3f317c1e241798f96f230f8ba9eba56e2746b06de7e68b8bfd755058c0e297" gracePeriod=30 Feb 02 11:19:45 crc kubenswrapper[4838]: I0202 11:19:45.244158 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="98e7b3ca-1138-4df5-a2c4-f1b03b4352bb" containerName="nova-api-api" containerID="cri-o://893e42201bfbe73cbc9cfb9c341cca8133d393c5a8b0b83423989ee93225e855" gracePeriod=30 Feb 02 11:19:45 crc kubenswrapper[4838]: I0202 11:19:45.262593 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/nova-cell0-conductor-0"] Feb 02 11:19:45 crc kubenswrapper[4838]: I0202 11:19:45.263833 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="c15c4499-e01e-474c-b653-efaf4af4c881" containerName="nova-cell0-conductor-conductor" containerID="cri-o://055ed90fce55d1295e3df2c4f7224b7f2ef165eba0200ab8cb13afa92a4cd6d0" gracePeriod=30 Feb 02 11:19:45 crc kubenswrapper[4838]: I0202 11:19:45.275990 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 11:19:45 crc kubenswrapper[4838]: I0202 11:19:45.276435 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="a451fb50-63dc-4dcc-8cf0-37e7f3d99888" containerName="nova-scheduler-scheduler" containerID="cri-o://b883a3c56c53daf2c6a2397e34f188e4648e6dc11ae45a796c90005b16c39e6f" gracePeriod=30 Feb 02 11:19:45 crc kubenswrapper[4838]: E0202 11:19:45.285897 4838 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b883a3c56c53daf2c6a2397e34f188e4648e6dc11ae45a796c90005b16c39e6f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 02 11:19:45 crc kubenswrapper[4838]: I0202 11:19:45.295516 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 11:19:45 crc kubenswrapper[4838]: I0202 11:19:45.295901 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="85cf2dd0-e8d9-4eba-a280-dba74e08cb91" containerName="nova-metadata-log" containerID="cri-o://ae017bc95b2af3f6e54879b85b24b475f068b97103df3a18f1ae0841a7fb1d43" gracePeriod=30 Feb 02 11:19:45 crc kubenswrapper[4838]: I0202 11:19:45.296383 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="85cf2dd0-e8d9-4eba-a280-dba74e08cb91" containerName="nova-metadata-metadata" containerID="cri-o://625e818ba91644b17531ae65cf32edd2bc9cd2dec981b20e52fc83131711f658" gracePeriod=30 Feb 02 11:19:45 crc kubenswrapper[4838]: E0202 11:19:45.298907 4838 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b883a3c56c53daf2c6a2397e34f188e4648e6dc11ae45a796c90005b16c39e6f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 02 11:19:45 crc kubenswrapper[4838]: E0202 11:19:45.305758 4838 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b883a3c56c53daf2c6a2397e34f188e4648e6dc11ae45a796c90005b16c39e6f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 02 11:19:45 crc kubenswrapper[4838]: E0202 11:19:45.305828 4838 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="a451fb50-63dc-4dcc-8cf0-37e7f3d99888" containerName="nova-scheduler-scheduler" Feb 02 11:19:45 crc kubenswrapper[4838]: I0202 11:19:45.430149 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: 
Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 02 11:19:45 crc kubenswrapper[4838]: I0202 11:19:45.430691 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 11:19:45 crc kubenswrapper[4838]: I0202 11:19:45.887602 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Feb 02 11:19:46 crc kubenswrapper[4838]: I0202 11:19:46.151935 4838 scope.go:117] "RemoveContainer" containerID="8b0bd4e83d9d936d1c40703a20fe501e869ef58a30c1d3e654ade208f5d2fa7f"
Feb 02 11:19:46 crc kubenswrapper[4838]: E0202 11:19:46.269901 4838 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/5696ae279f2504f2d37af11cc2251cc0834069fa5cccc2c04b713daac26113fe/diff" to get inode usage: stat /var/lib/containers/storage/overlay/5696ae279f2504f2d37af11cc2251cc0834069fa5cccc2c04b713daac26113fe/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack_ceilometer-0_7bc80d3e-05b1-4bfa-9c94-3d7c162420aa/proxy-httpd/0.log" to get inode usage: stat /var/log/pods/openstack_ceilometer-0_7bc80d3e-05b1-4bfa-9c94-3d7c162420aa/proxy-httpd/0.log: no such file or directory
Feb 02 11:19:46 crc kubenswrapper[4838]: I0202 11:19:46.306347 4838 generic.go:334] "Generic (PLEG): container finished" podID="85cf2dd0-e8d9-4eba-a280-dba74e08cb91" containerID="ae017bc95b2af3f6e54879b85b24b475f068b97103df3a18f1ae0841a7fb1d43" exitCode=143
Feb 02 11:19:46 crc kubenswrapper[4838]: I0202 11:19:46.306678 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"85cf2dd0-e8d9-4eba-a280-dba74e08cb91","Type":"ContainerDied","Data":"ae017bc95b2af3f6e54879b85b24b475f068b97103df3a18f1ae0841a7fb1d43"}
Feb 02 11:19:46 crc kubenswrapper[4838]: I0202 11:19:46.309000 4838 generic.go:334] "Generic (PLEG): container finished" podID="98e7b3ca-1138-4df5-a2c4-f1b03b4352bb" containerID="7e3f317c1e241798f96f230f8ba9eba56e2746b06de7e68b8bfd755058c0e297" exitCode=143
Feb 02 11:19:46 crc kubenswrapper[4838]: I0202 11:19:46.309192 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"98e7b3ca-1138-4df5-a2c4-f1b03b4352bb","Type":"ContainerDied","Data":"7e3f317c1e241798f96f230f8ba9eba56e2746b06de7e68b8bfd755058c0e297"}
Feb 02 11:19:46 crc kubenswrapper[4838]: I0202 11:19:46.309310 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d7e894c9-38fb-4616-ab4b-68166a67b5b5" containerName="ceilometer-central-agent" containerID="cri-o://f4e24cc1537c5a70eedcada4b7f4e5cd77efa76aeb73ab7c15fbda99f6fe46b4" gracePeriod=30
Feb 02 11:19:46 crc kubenswrapper[4838]: I0202 11:19:46.309386 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d7e894c9-38fb-4616-ab4b-68166a67b5b5" containerName="proxy-httpd" containerID="cri-o://aae89210173f273a66f974d9f4ae0187e6c7992306e5fdefa17a0e221c99711c" gracePeriod=30
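The exitCode=143 entries above decode as 128 + 15, termination by SIGTERM: "Killing container with a grace period" means the runtime delivers SIGTERM first and escalates to SIGKILL only if the container outlives the gracePeriod (30 s here). A process that handles SIGTERM and exits on its own reports code 0 instead, as dnsmasq-dns did a moment earlier; a minimal sketch of such a handler:

// Minimal graceful-shutdown sketch: catch SIGTERM (the runtime's first,
// graceful signal) and exit cleanly instead of dying with status 128+15=143.
package main

import (
	"os"
	"os/signal"
	"syscall"
)

func main() {
	stop := make(chan os.Signal, 1)
	signal.Notify(stop, syscall.SIGTERM)
	<-stop     // graceful shutdown work would go here, within the grace period
	os.Exit(0) // clean exit -> exitCode=0; an unhandled SIGTERM reports 143
}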
podUID="d7e894c9-38fb-4616-ab4b-68166a67b5b5" containerName="ceilometer-notification-agent" containerID="cri-o://bd4490d7bdb69d9d04b638bd8d5d83d860065b5e34f6279b5819dad5e39ce15f" gracePeriod=30 Feb 02 11:19:46 crc kubenswrapper[4838]: I0202 11:19:46.309439 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d7e894c9-38fb-4616-ab4b-68166a67b5b5" containerName="sg-core" containerID="cri-o://963539a18de0b512aefb2312077654994bf416d1972194f00551666db57fe8c3" gracePeriod=30 Feb 02 11:19:46 crc kubenswrapper[4838]: E0202 11:19:46.451571 4838 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="055ed90fce55d1295e3df2c4f7224b7f2ef165eba0200ab8cb13afa92a4cd6d0" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Feb 02 11:19:46 crc kubenswrapper[4838]: E0202 11:19:46.479113 4838 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="055ed90fce55d1295e3df2c4f7224b7f2ef165eba0200ab8cb13afa92a4cd6d0" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Feb 02 11:19:46 crc kubenswrapper[4838]: E0202 11:19:46.489357 4838 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="055ed90fce55d1295e3df2c4f7224b7f2ef165eba0200ab8cb13afa92a4cd6d0" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Feb 02 11:19:46 crc kubenswrapper[4838]: E0202 11:19:46.489426 4838 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="c15c4499-e01e-474c-b653-efaf4af4c881" containerName="nova-cell0-conductor-conductor" Feb 02 11:19:47 crc kubenswrapper[4838]: I0202 11:19:47.322280 4838 generic.go:334] "Generic (PLEG): container finished" podID="d7e894c9-38fb-4616-ab4b-68166a67b5b5" containerID="aae89210173f273a66f974d9f4ae0187e6c7992306e5fdefa17a0e221c99711c" exitCode=0 Feb 02 11:19:47 crc kubenswrapper[4838]: I0202 11:19:47.322316 4838 generic.go:334] "Generic (PLEG): container finished" podID="d7e894c9-38fb-4616-ab4b-68166a67b5b5" containerID="963539a18de0b512aefb2312077654994bf416d1972194f00551666db57fe8c3" exitCode=2 Feb 02 11:19:47 crc kubenswrapper[4838]: I0202 11:19:47.322323 4838 generic.go:334] "Generic (PLEG): container finished" podID="d7e894c9-38fb-4616-ab4b-68166a67b5b5" containerID="bd4490d7bdb69d9d04b638bd8d5d83d860065b5e34f6279b5819dad5e39ce15f" exitCode=0 Feb 02 11:19:47 crc kubenswrapper[4838]: I0202 11:19:47.322359 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7e894c9-38fb-4616-ab4b-68166a67b5b5","Type":"ContainerDied","Data":"aae89210173f273a66f974d9f4ae0187e6c7992306e5fdefa17a0e221c99711c"} Feb 02 11:19:47 crc kubenswrapper[4838]: I0202 11:19:47.322432 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7e894c9-38fb-4616-ab4b-68166a67b5b5","Type":"ContainerDied","Data":"963539a18de0b512aefb2312077654994bf416d1972194f00551666db57fe8c3"} Feb 02 11:19:47 crc kubenswrapper[4838]: I0202 11:19:47.322449 4838 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7e894c9-38fb-4616-ab4b-68166a67b5b5","Type":"ContainerDied","Data":"bd4490d7bdb69d9d04b638bd8d5d83d860065b5e34f6279b5819dad5e39ce15f"} Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.129284 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.225439 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a451fb50-63dc-4dcc-8cf0-37e7f3d99888-combined-ca-bundle\") pod \"a451fb50-63dc-4dcc-8cf0-37e7f3d99888\" (UID: \"a451fb50-63dc-4dcc-8cf0-37e7f3d99888\") " Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.225713 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzsbb\" (UniqueName: \"kubernetes.io/projected/a451fb50-63dc-4dcc-8cf0-37e7f3d99888-kube-api-access-lzsbb\") pod \"a451fb50-63dc-4dcc-8cf0-37e7f3d99888\" (UID: \"a451fb50-63dc-4dcc-8cf0-37e7f3d99888\") " Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.225760 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a451fb50-63dc-4dcc-8cf0-37e7f3d99888-config-data\") pod \"a451fb50-63dc-4dcc-8cf0-37e7f3d99888\" (UID: \"a451fb50-63dc-4dcc-8cf0-37e7f3d99888\") " Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.231168 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a451fb50-63dc-4dcc-8cf0-37e7f3d99888-kube-api-access-lzsbb" (OuterVolumeSpecName: "kube-api-access-lzsbb") pod "a451fb50-63dc-4dcc-8cf0-37e7f3d99888" (UID: "a451fb50-63dc-4dcc-8cf0-37e7f3d99888"). InnerVolumeSpecName "kube-api-access-lzsbb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.275891 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a451fb50-63dc-4dcc-8cf0-37e7f3d99888-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a451fb50-63dc-4dcc-8cf0-37e7f3d99888" (UID: "a451fb50-63dc-4dcc-8cf0-37e7f3d99888"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.298059 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a451fb50-63dc-4dcc-8cf0-37e7f3d99888-config-data" (OuterVolumeSpecName: "config-data") pod "a451fb50-63dc-4dcc-8cf0-37e7f3d99888" (UID: "a451fb50-63dc-4dcc-8cf0-37e7f3d99888"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.328117 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzsbb\" (UniqueName: \"kubernetes.io/projected/a451fb50-63dc-4dcc-8cf0-37e7f3d99888-kube-api-access-lzsbb\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.328150 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a451fb50-63dc-4dcc-8cf0-37e7f3d99888-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.328163 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a451fb50-63dc-4dcc-8cf0-37e7f3d99888-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.338739 4838 generic.go:334] "Generic (PLEG): container finished" podID="c15c4499-e01e-474c-b653-efaf4af4c881" containerID="055ed90fce55d1295e3df2c4f7224b7f2ef165eba0200ab8cb13afa92a4cd6d0" exitCode=0 Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.338788 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"c15c4499-e01e-474c-b653-efaf4af4c881","Type":"ContainerDied","Data":"055ed90fce55d1295e3df2c4f7224b7f2ef165eba0200ab8cb13afa92a4cd6d0"} Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.340952 4838 generic.go:334] "Generic (PLEG): container finished" podID="a451fb50-63dc-4dcc-8cf0-37e7f3d99888" containerID="b883a3c56c53daf2c6a2397e34f188e4648e6dc11ae45a796c90005b16c39e6f" exitCode=0 Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.341005 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a451fb50-63dc-4dcc-8cf0-37e7f3d99888","Type":"ContainerDied","Data":"b883a3c56c53daf2c6a2397e34f188e4648e6dc11ae45a796c90005b16c39e6f"} Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.341028 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a451fb50-63dc-4dcc-8cf0-37e7f3d99888","Type":"ContainerDied","Data":"f591c7a46fec62a9dde0a5336c90e0720e7d920b6ba91ca5a782075bd4f9bf0c"} Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.341049 4838 scope.go:117] "RemoveContainer" containerID="b883a3c56c53daf2c6a2397e34f188e4648e6dc11ae45a796c90005b16c39e6f" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.341191 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.345172 4838 generic.go:334] "Generic (PLEG): container finished" podID="7ce26605-8dfc-48cd-a362-1a37c67ea300" containerID="4823bcd344d625ad561c7b904f665bccc6207c62fe0a225cf8d050bc4a5c40fd" exitCode=0 Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.345212 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-5tpqb" event={"ID":"7ce26605-8dfc-48cd-a362-1a37c67ea300","Type":"ContainerDied","Data":"4823bcd344d625ad561c7b904f665bccc6207c62fe0a225cf8d050bc4a5c40fd"} Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.370494 4838 scope.go:117] "RemoveContainer" containerID="b883a3c56c53daf2c6a2397e34f188e4648e6dc11ae45a796c90005b16c39e6f" Feb 02 11:19:48 crc kubenswrapper[4838]: E0202 11:19:48.371044 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b883a3c56c53daf2c6a2397e34f188e4648e6dc11ae45a796c90005b16c39e6f\": container with ID starting with b883a3c56c53daf2c6a2397e34f188e4648e6dc11ae45a796c90005b16c39e6f not found: ID does not exist" containerID="b883a3c56c53daf2c6a2397e34f188e4648e6dc11ae45a796c90005b16c39e6f" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.371082 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b883a3c56c53daf2c6a2397e34f188e4648e6dc11ae45a796c90005b16c39e6f"} err="failed to get container status \"b883a3c56c53daf2c6a2397e34f188e4648e6dc11ae45a796c90005b16c39e6f\": rpc error: code = NotFound desc = could not find container \"b883a3c56c53daf2c6a2397e34f188e4648e6dc11ae45a796c90005b16c39e6f\": container with ID starting with b883a3c56c53daf2c6a2397e34f188e4648e6dc11ae45a796c90005b16c39e6f not found: ID does not exist" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.398868 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.421686 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.431054 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 11:19:48 crc kubenswrapper[4838]: E0202 11:19:48.431558 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="232b1b0a-b35d-4834-a4bb-3a1ed7de6f71" containerName="init" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.431573 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="232b1b0a-b35d-4834-a4bb-3a1ed7de6f71" containerName="init" Feb 02 11:19:48 crc kubenswrapper[4838]: E0202 11:19:48.431583 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81309430-b454-4c48-95da-f3dbed0ad937" containerName="registry-server" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.431590 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="81309430-b454-4c48-95da-f3dbed0ad937" containerName="registry-server" Feb 02 11:19:48 crc kubenswrapper[4838]: E0202 11:19:48.431651 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81309430-b454-4c48-95da-f3dbed0ad937" containerName="extract-utilities" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.431661 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="81309430-b454-4c48-95da-f3dbed0ad937" containerName="extract-utilities" Feb 02 11:19:48 crc kubenswrapper[4838]: E0202 
11:19:48.431678 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="232b1b0a-b35d-4834-a4bb-3a1ed7de6f71" containerName="dnsmasq-dns" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.431686 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="232b1b0a-b35d-4834-a4bb-3a1ed7de6f71" containerName="dnsmasq-dns" Feb 02 11:19:48 crc kubenswrapper[4838]: E0202 11:19:48.431698 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81309430-b454-4c48-95da-f3dbed0ad937" containerName="extract-content" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.431705 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="81309430-b454-4c48-95da-f3dbed0ad937" containerName="extract-content" Feb 02 11:19:48 crc kubenswrapper[4838]: E0202 11:19:48.431721 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a451fb50-63dc-4dcc-8cf0-37e7f3d99888" containerName="nova-scheduler-scheduler" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.431728 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="a451fb50-63dc-4dcc-8cf0-37e7f3d99888" containerName="nova-scheduler-scheduler" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.431954 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="a451fb50-63dc-4dcc-8cf0-37e7f3d99888" containerName="nova-scheduler-scheduler" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.431968 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="232b1b0a-b35d-4834-a4bb-3a1ed7de6f71" containerName="dnsmasq-dns" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.431985 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="81309430-b454-4c48-95da-f3dbed0ad937" containerName="registry-server" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.432740 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.439130 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.441671 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.520552 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a451fb50-63dc-4dcc-8cf0-37e7f3d99888" path="/var/lib/kubelet/pods/a451fb50-63dc-4dcc-8cf0-37e7f3d99888/volumes" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.531894 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf592065-13f1-4594-9642-6f7c039c42ad-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cf592065-13f1-4594-9642-6f7c039c42ad\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.531974 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf592065-13f1-4594-9642-6f7c039c42ad-config-data\") pod \"nova-scheduler-0\" (UID: \"cf592065-13f1-4594-9642-6f7c039c42ad\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.532016 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxt5g\" (UniqueName: \"kubernetes.io/projected/cf592065-13f1-4594-9642-6f7c039c42ad-kube-api-access-bxt5g\") pod \"nova-scheduler-0\" (UID: \"cf592065-13f1-4594-9642-6f7c039c42ad\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.633439 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf592065-13f1-4594-9642-6f7c039c42ad-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cf592065-13f1-4594-9642-6f7c039c42ad\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.633509 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf592065-13f1-4594-9642-6f7c039c42ad-config-data\") pod \"nova-scheduler-0\" (UID: \"cf592065-13f1-4594-9642-6f7c039c42ad\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.633560 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxt5g\" (UniqueName: \"kubernetes.io/projected/cf592065-13f1-4594-9642-6f7c039c42ad-kube-api-access-bxt5g\") pod \"nova-scheduler-0\" (UID: \"cf592065-13f1-4594-9642-6f7c039c42ad\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.638452 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf592065-13f1-4594-9642-6f7c039c42ad-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cf592065-13f1-4594-9642-6f7c039c42ad\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.646467 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf592065-13f1-4594-9642-6f7c039c42ad-config-data\") pod \"nova-scheduler-0\" (UID: 
\"cf592065-13f1-4594-9642-6f7c039c42ad\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.663281 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxt5g\" (UniqueName: \"kubernetes.io/projected/cf592065-13f1-4594-9642-6f7c039c42ad-kube-api-access-bxt5g\") pod \"nova-scheduler-0\" (UID: \"cf592065-13f1-4594-9642-6f7c039c42ad\") " pod="openstack/nova-scheduler-0" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.758098 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.916997 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 02 11:19:48 crc kubenswrapper[4838]: I0202 11:19:48.987267 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.048903 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c15c4499-e01e-474c-b653-efaf4af4c881-combined-ca-bundle\") pod \"c15c4499-e01e-474c-b653-efaf4af4c881\" (UID: \"c15c4499-e01e-474c-b653-efaf4af4c881\") " Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.048980 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c15c4499-e01e-474c-b653-efaf4af4c881-config-data\") pod \"c15c4499-e01e-474c-b653-efaf4af4c881\" (UID: \"c15c4499-e01e-474c-b653-efaf4af4c881\") " Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.049038 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-ceilometer-tls-certs\") pod \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.049065 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-scripts\") pod \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.049114 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wk6v7\" (UniqueName: \"kubernetes.io/projected/d7e894c9-38fb-4616-ab4b-68166a67b5b5-kube-api-access-wk6v7\") pod \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.049152 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7e894c9-38fb-4616-ab4b-68166a67b5b5-run-httpd\") pod \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.049196 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-sg-core-conf-yaml\") pod \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.049248 4838 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-qsrjb\" (UniqueName: \"kubernetes.io/projected/c15c4499-e01e-474c-b653-efaf4af4c881-kube-api-access-qsrjb\") pod \"c15c4499-e01e-474c-b653-efaf4af4c881\" (UID: \"c15c4499-e01e-474c-b653-efaf4af4c881\") " Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.049305 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-config-data\") pod \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.049365 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7e894c9-38fb-4616-ab4b-68166a67b5b5-log-httpd\") pod \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.049387 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-combined-ca-bundle\") pod \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\" (UID: \"d7e894c9-38fb-4616-ab4b-68166a67b5b5\") " Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.053535 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7e894c9-38fb-4616-ab4b-68166a67b5b5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d7e894c9-38fb-4616-ab4b-68166a67b5b5" (UID: "d7e894c9-38fb-4616-ab4b-68166a67b5b5"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.053983 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7e894c9-38fb-4616-ab4b-68166a67b5b5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d7e894c9-38fb-4616-ab4b-68166a67b5b5" (UID: "d7e894c9-38fb-4616-ab4b-68166a67b5b5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.059300 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c15c4499-e01e-474c-b653-efaf4af4c881-kube-api-access-qsrjb" (OuterVolumeSpecName: "kube-api-access-qsrjb") pod "c15c4499-e01e-474c-b653-efaf4af4c881" (UID: "c15c4499-e01e-474c-b653-efaf4af4c881"). InnerVolumeSpecName "kube-api-access-qsrjb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.059292 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-scripts" (OuterVolumeSpecName: "scripts") pod "d7e894c9-38fb-4616-ab4b-68166a67b5b5" (UID: "d7e894c9-38fb-4616-ab4b-68166a67b5b5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.069842 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7e894c9-38fb-4616-ab4b-68166a67b5b5-kube-api-access-wk6v7" (OuterVolumeSpecName: "kube-api-access-wk6v7") pod "d7e894c9-38fb-4616-ab4b-68166a67b5b5" (UID: "d7e894c9-38fb-4616-ab4b-68166a67b5b5"). InnerVolumeSpecName "kube-api-access-wk6v7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.105001 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c15c4499-e01e-474c-b653-efaf4af4c881-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c15c4499-e01e-474c-b653-efaf4af4c881" (UID: "c15c4499-e01e-474c-b653-efaf4af4c881"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.107753 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c15c4499-e01e-474c-b653-efaf4af4c881-config-data" (OuterVolumeSpecName: "config-data") pod "c15c4499-e01e-474c-b653-efaf4af4c881" (UID: "c15c4499-e01e-474c-b653-efaf4af4c881"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.114211 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d7e894c9-38fb-4616-ab4b-68166a67b5b5" (UID: "d7e894c9-38fb-4616-ab4b-68166a67b5b5"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.164003 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c15c4499-e01e-474c-b653-efaf4af4c881-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.164050 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c15c4499-e01e-474c-b653-efaf4af4c881-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.164059 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.164068 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wk6v7\" (UniqueName: \"kubernetes.io/projected/d7e894c9-38fb-4616-ab4b-68166a67b5b5-kube-api-access-wk6v7\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.164078 4838 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7e894c9-38fb-4616-ab4b-68166a67b5b5-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.164086 4838 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.164097 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qsrjb\" (UniqueName: \"kubernetes.io/projected/c15c4499-e01e-474c-b653-efaf4af4c881-kube-api-access-qsrjb\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.164107 4838 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7e894c9-38fb-4616-ab4b-68166a67b5b5-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:49 crc kubenswrapper[4838]: 
I0202 11:19:49.194767 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "d7e894c9-38fb-4616-ab4b-68166a67b5b5" (UID: "d7e894c9-38fb-4616-ab4b-68166a67b5b5"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.197811 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d7e894c9-38fb-4616-ab4b-68166a67b5b5" (UID: "d7e894c9-38fb-4616-ab4b-68166a67b5b5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.256160 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.265850 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.265890 4838 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.272730 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-config-data" (OuterVolumeSpecName: "config-data") pod "d7e894c9-38fb-4616-ab4b-68166a67b5b5" (UID: "d7e894c9-38fb-4616-ab4b-68166a67b5b5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.363485 4838 generic.go:334] "Generic (PLEG): container finished" podID="d7e894c9-38fb-4616-ab4b-68166a67b5b5" containerID="f4e24cc1537c5a70eedcada4b7f4e5cd77efa76aeb73ab7c15fbda99f6fe46b4" exitCode=0 Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.364033 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7e894c9-38fb-4616-ab4b-68166a67b5b5","Type":"ContainerDied","Data":"f4e24cc1537c5a70eedcada4b7f4e5cd77efa76aeb73ab7c15fbda99f6fe46b4"} Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.364057 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7e894c9-38fb-4616-ab4b-68166a67b5b5","Type":"ContainerDied","Data":"2a2e09b49c27998bee521c2467a0f31340095d895a550024a9615bb2365b1e0b"} Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.364081 4838 scope.go:117] "RemoveContainer" containerID="aae89210173f273a66f974d9f4ae0187e6c7992306e5fdefa17a0e221c99711c" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.364200 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.366740 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98e7b3ca-1138-4df5-a2c4-f1b03b4352bb-combined-ca-bundle\") pod \"98e7b3ca-1138-4df5-a2c4-f1b03b4352bb\" (UID: \"98e7b3ca-1138-4df5-a2c4-f1b03b4352bb\") " Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.366916 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ltf57\" (UniqueName: \"kubernetes.io/projected/98e7b3ca-1138-4df5-a2c4-f1b03b4352bb-kube-api-access-ltf57\") pod \"98e7b3ca-1138-4df5-a2c4-f1b03b4352bb\" (UID: \"98e7b3ca-1138-4df5-a2c4-f1b03b4352bb\") " Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.366969 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98e7b3ca-1138-4df5-a2c4-f1b03b4352bb-config-data\") pod \"98e7b3ca-1138-4df5-a2c4-f1b03b4352bb\" (UID: \"98e7b3ca-1138-4df5-a2c4-f1b03b4352bb\") " Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.367074 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98e7b3ca-1138-4df5-a2c4-f1b03b4352bb-logs\") pod \"98e7b3ca-1138-4df5-a2c4-f1b03b4352bb\" (UID: \"98e7b3ca-1138-4df5-a2c4-f1b03b4352bb\") " Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.367602 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7e894c9-38fb-4616-ab4b-68166a67b5b5-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.368145 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98e7b3ca-1138-4df5-a2c4-f1b03b4352bb-logs" (OuterVolumeSpecName: "logs") pod "98e7b3ca-1138-4df5-a2c4-f1b03b4352bb" (UID: "98e7b3ca-1138-4df5-a2c4-f1b03b4352bb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.372761 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98e7b3ca-1138-4df5-a2c4-f1b03b4352bb-kube-api-access-ltf57" (OuterVolumeSpecName: "kube-api-access-ltf57") pod "98e7b3ca-1138-4df5-a2c4-f1b03b4352bb" (UID: "98e7b3ca-1138-4df5-a2c4-f1b03b4352bb"). InnerVolumeSpecName "kube-api-access-ltf57". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.379114 4838 generic.go:334] "Generic (PLEG): container finished" podID="98e7b3ca-1138-4df5-a2c4-f1b03b4352bb" containerID="893e42201bfbe73cbc9cfb9c341cca8133d393c5a8b0b83423989ee93225e855" exitCode=0 Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.379195 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"98e7b3ca-1138-4df5-a2c4-f1b03b4352bb","Type":"ContainerDied","Data":"893e42201bfbe73cbc9cfb9c341cca8133d393c5a8b0b83423989ee93225e855"} Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.379223 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"98e7b3ca-1138-4df5-a2c4-f1b03b4352bb","Type":"ContainerDied","Data":"cd558cabeb49e935aeef77b62be6108ba9f571895321cc1a1c11325742463853"} Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.379305 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.383547 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.383579 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"c15c4499-e01e-474c-b653-efaf4af4c881","Type":"ContainerDied","Data":"c7b210765ba5020ed5046659450c59367e9a0045e3dd03e1b0081661fd65dfe2"} Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.399300 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98e7b3ca-1138-4df5-a2c4-f1b03b4352bb-config-data" (OuterVolumeSpecName: "config-data") pod "98e7b3ca-1138-4df5-a2c4-f1b03b4352bb" (UID: "98e7b3ca-1138-4df5-a2c4-f1b03b4352bb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.415534 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.427690 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.438861 4838 scope.go:117] "RemoveContainer" containerID="963539a18de0b512aefb2312077654994bf416d1972194f00551666db57fe8c3" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.440291 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98e7b3ca-1138-4df5-a2c4-f1b03b4352bb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "98e7b3ca-1138-4df5-a2c4-f1b03b4352bb" (UID: "98e7b3ca-1138-4df5-a2c4-f1b03b4352bb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.441059 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.459374 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.470450 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98e7b3ca-1138-4df5-a2c4-f1b03b4352bb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.470703 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ltf57\" (UniqueName: \"kubernetes.io/projected/98e7b3ca-1138-4df5-a2c4-f1b03b4352bb-kube-api-access-ltf57\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.470780 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98e7b3ca-1138-4df5-a2c4-f1b03b4352bb-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.470848 4838 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98e7b3ca-1138-4df5-a2c4-f1b03b4352bb-logs\") on node \"crc\" DevicePath \"\"" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.476676 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:19:49 crc kubenswrapper[4838]: E0202 11:19:49.477111 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7e894c9-38fb-4616-ab4b-68166a67b5b5" containerName="ceilometer-notification-agent" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.477127 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7e894c9-38fb-4616-ab4b-68166a67b5b5" containerName="ceilometer-notification-agent" Feb 02 11:19:49 crc kubenswrapper[4838]: E0202 11:19:49.477145 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7e894c9-38fb-4616-ab4b-68166a67b5b5" containerName="ceilometer-central-agent" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.477153 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7e894c9-38fb-4616-ab4b-68166a67b5b5" containerName="ceilometer-central-agent" Feb 02 11:19:49 crc kubenswrapper[4838]: E0202 11:19:49.477182 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c15c4499-e01e-474c-b653-efaf4af4c881" containerName="nova-cell0-conductor-conductor" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.477191 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="c15c4499-e01e-474c-b653-efaf4af4c881" containerName="nova-cell0-conductor-conductor" Feb 02 11:19:49 crc kubenswrapper[4838]: E0202 11:19:49.477209 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7e894c9-38fb-4616-ab4b-68166a67b5b5" containerName="proxy-httpd" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.477216 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7e894c9-38fb-4616-ab4b-68166a67b5b5" containerName="proxy-httpd" Feb 02 11:19:49 crc kubenswrapper[4838]: E0202 11:19:49.477227 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98e7b3ca-1138-4df5-a2c4-f1b03b4352bb" containerName="nova-api-log" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.477233 4838 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="98e7b3ca-1138-4df5-a2c4-f1b03b4352bb" containerName="nova-api-log" Feb 02 11:19:49 crc kubenswrapper[4838]: E0202 11:19:49.477245 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7e894c9-38fb-4616-ab4b-68166a67b5b5" containerName="sg-core" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.477252 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7e894c9-38fb-4616-ab4b-68166a67b5b5" containerName="sg-core" Feb 02 11:19:49 crc kubenswrapper[4838]: E0202 11:19:49.477269 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98e7b3ca-1138-4df5-a2c4-f1b03b4352bb" containerName="nova-api-api" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.477276 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="98e7b3ca-1138-4df5-a2c4-f1b03b4352bb" containerName="nova-api-api" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.477475 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="98e7b3ca-1138-4df5-a2c4-f1b03b4352bb" containerName="nova-api-log" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.477496 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7e894c9-38fb-4616-ab4b-68166a67b5b5" containerName="sg-core" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.477513 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7e894c9-38fb-4616-ab4b-68166a67b5b5" containerName="proxy-httpd" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.477528 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="98e7b3ca-1138-4df5-a2c4-f1b03b4352bb" containerName="nova-api-api" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.477546 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7e894c9-38fb-4616-ab4b-68166a67b5b5" containerName="ceilometer-central-agent" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.477561 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="c15c4499-e01e-474c-b653-efaf4af4c881" containerName="nova-cell0-conductor-conductor" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.477574 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7e894c9-38fb-4616-ab4b-68166a67b5b5" containerName="ceilometer-notification-agent" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.479761 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.487378 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.496810 4838 scope.go:117] "RemoveContainer" containerID="bd4490d7bdb69d9d04b638bd8d5d83d860065b5e34f6279b5819dad5e39ce15f" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.497023 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.497267 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.497306 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.502941 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.504071 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.507152 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.518868 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.559997 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.571965 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-config-data\") pod \"ceilometer-0\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.572029 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.572055 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29a3c9fb-a43f-4867-94e9-dd205f0fb517-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"29a3c9fb-a43f-4867-94e9-dd205f0fb517\") " pod="openstack/nova-cell0-conductor-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.572095 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.572137 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-run-httpd\") pod \"ceilometer-0\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.572163 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29a3c9fb-a43f-4867-94e9-dd205f0fb517-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"29a3c9fb-a43f-4867-94e9-dd205f0fb517\") " pod="openstack/nova-cell0-conductor-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.572183 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rqft\" (UniqueName: \"kubernetes.io/projected/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-kube-api-access-8rqft\") pod \"ceilometer-0\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.572197 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9xsd\" (UniqueName: \"kubernetes.io/projected/29a3c9fb-a43f-4867-94e9-dd205f0fb517-kube-api-access-f9xsd\") pod \"nova-cell0-conductor-0\" (UID: 
\"29a3c9fb-a43f-4867-94e9-dd205f0fb517\") " pod="openstack/nova-cell0-conductor-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.572243 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-log-httpd\") pod \"ceilometer-0\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.572257 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.572298 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-scripts\") pod \"ceilometer-0\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.665706 4838 scope.go:117] "RemoveContainer" containerID="f4e24cc1537c5a70eedcada4b7f4e5cd77efa76aeb73ab7c15fbda99f6fe46b4" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.673873 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-config-data\") pod \"ceilometer-0\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.674030 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.674120 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29a3c9fb-a43f-4867-94e9-dd205f0fb517-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"29a3c9fb-a43f-4867-94e9-dd205f0fb517\") " pod="openstack/nova-cell0-conductor-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.674199 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.674273 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-run-httpd\") pod \"ceilometer-0\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.674336 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29a3c9fb-a43f-4867-94e9-dd205f0fb517-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"29a3c9fb-a43f-4867-94e9-dd205f0fb517\") " pod="openstack/nova-cell0-conductor-0" Feb 02 11:19:49 
crc kubenswrapper[4838]: I0202 11:19:49.674403 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rqft\" (UniqueName: \"kubernetes.io/projected/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-kube-api-access-8rqft\") pod \"ceilometer-0\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.674475 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9xsd\" (UniqueName: \"kubernetes.io/projected/29a3c9fb-a43f-4867-94e9-dd205f0fb517-kube-api-access-f9xsd\") pod \"nova-cell0-conductor-0\" (UID: \"29a3c9fb-a43f-4867-94e9-dd205f0fb517\") " pod="openstack/nova-cell0-conductor-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.674565 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-log-httpd\") pod \"ceilometer-0\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.674686 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.674795 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-scripts\") pod \"ceilometer-0\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.675932 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-run-httpd\") pod \"ceilometer-0\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.677998 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-log-httpd\") pod \"ceilometer-0\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.679301 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.683266 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29a3c9fb-a43f-4867-94e9-dd205f0fb517-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"29a3c9fb-a43f-4867-94e9-dd205f0fb517\") " pod="openstack/nova-cell0-conductor-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.683605 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29a3c9fb-a43f-4867-94e9-dd205f0fb517-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"29a3c9fb-a43f-4867-94e9-dd205f0fb517\") " 
pod="openstack/nova-cell0-conductor-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.684443 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-config-data\") pod \"ceilometer-0\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.686228 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.686832 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.686800 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-scripts\") pod \"ceilometer-0\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.695547 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9xsd\" (UniqueName: \"kubernetes.io/projected/29a3c9fb-a43f-4867-94e9-dd205f0fb517-kube-api-access-f9xsd\") pod \"nova-cell0-conductor-0\" (UID: \"29a3c9fb-a43f-4867-94e9-dd205f0fb517\") " pod="openstack/nova-cell0-conductor-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.704640 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rqft\" (UniqueName: \"kubernetes.io/projected/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-kube-api-access-8rqft\") pod \"ceilometer-0\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " pod="openstack/ceilometer-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.768360 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.823402 4838 scope.go:117] "RemoveContainer" containerID="aae89210173f273a66f974d9f4ae0187e6c7992306e5fdefa17a0e221c99711c" Feb 02 11:19:49 crc kubenswrapper[4838]: E0202 11:19:49.823948 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aae89210173f273a66f974d9f4ae0187e6c7992306e5fdefa17a0e221c99711c\": container with ID starting with aae89210173f273a66f974d9f4ae0187e6c7992306e5fdefa17a0e221c99711c not found: ID does not exist" containerID="aae89210173f273a66f974d9f4ae0187e6c7992306e5fdefa17a0e221c99711c" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.823981 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aae89210173f273a66f974d9f4ae0187e6c7992306e5fdefa17a0e221c99711c"} err="failed to get container status \"aae89210173f273a66f974d9f4ae0187e6c7992306e5fdefa17a0e221c99711c\": rpc error: code = NotFound desc = could not find container \"aae89210173f273a66f974d9f4ae0187e6c7992306e5fdefa17a0e221c99711c\": container with ID starting with aae89210173f273a66f974d9f4ae0187e6c7992306e5fdefa17a0e221c99711c not found: ID does not exist" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.824023 4838 scope.go:117] "RemoveContainer" containerID="963539a18de0b512aefb2312077654994bf416d1972194f00551666db57fe8c3" Feb 02 11:19:49 crc kubenswrapper[4838]: E0202 11:19:49.829252 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"963539a18de0b512aefb2312077654994bf416d1972194f00551666db57fe8c3\": container with ID starting with 963539a18de0b512aefb2312077654994bf416d1972194f00551666db57fe8c3 not found: ID does not exist" containerID="963539a18de0b512aefb2312077654994bf416d1972194f00551666db57fe8c3" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.829298 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"963539a18de0b512aefb2312077654994bf416d1972194f00551666db57fe8c3"} err="failed to get container status \"963539a18de0b512aefb2312077654994bf416d1972194f00551666db57fe8c3\": rpc error: code = NotFound desc = could not find container \"963539a18de0b512aefb2312077654994bf416d1972194f00551666db57fe8c3\": container with ID starting with 963539a18de0b512aefb2312077654994bf416d1972194f00551666db57fe8c3 not found: ID does not exist" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.829326 4838 scope.go:117] "RemoveContainer" containerID="bd4490d7bdb69d9d04b638bd8d5d83d860065b5e34f6279b5819dad5e39ce15f" Feb 02 11:19:49 crc kubenswrapper[4838]: E0202 11:19:49.831047 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd4490d7bdb69d9d04b638bd8d5d83d860065b5e34f6279b5819dad5e39ce15f\": container with ID starting with bd4490d7bdb69d9d04b638bd8d5d83d860065b5e34f6279b5819dad5e39ce15f not found: ID does not exist" containerID="bd4490d7bdb69d9d04b638bd8d5d83d860065b5e34f6279b5819dad5e39ce15f" Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.831094 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd4490d7bdb69d9d04b638bd8d5d83d860065b5e34f6279b5819dad5e39ce15f"} err="failed to get container status \"bd4490d7bdb69d9d04b638bd8d5d83d860065b5e34f6279b5819dad5e39ce15f\": rpc error: code = NotFound desc = 
could not find container \"bd4490d7bdb69d9d04b638bd8d5d83d860065b5e34f6279b5819dad5e39ce15f\": container with ID starting with bd4490d7bdb69d9d04b638bd8d5d83d860065b5e34f6279b5819dad5e39ce15f not found: ID does not exist"
Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.831126 4838 scope.go:117] "RemoveContainer" containerID="f4e24cc1537c5a70eedcada4b7f4e5cd77efa76aeb73ab7c15fbda99f6fe46b4"
Feb 02 11:19:49 crc kubenswrapper[4838]: E0202 11:19:49.833217 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4e24cc1537c5a70eedcada4b7f4e5cd77efa76aeb73ab7c15fbda99f6fe46b4\": container with ID starting with f4e24cc1537c5a70eedcada4b7f4e5cd77efa76aeb73ab7c15fbda99f6fe46b4 not found: ID does not exist" containerID="f4e24cc1537c5a70eedcada4b7f4e5cd77efa76aeb73ab7c15fbda99f6fe46b4"
Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.833275 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4e24cc1537c5a70eedcada4b7f4e5cd77efa76aeb73ab7c15fbda99f6fe46b4"} err="failed to get container status \"f4e24cc1537c5a70eedcada4b7f4e5cd77efa76aeb73ab7c15fbda99f6fe46b4\": rpc error: code = NotFound desc = could not find container \"f4e24cc1537c5a70eedcada4b7f4e5cd77efa76aeb73ab7c15fbda99f6fe46b4\": container with ID starting with f4e24cc1537c5a70eedcada4b7f4e5cd77efa76aeb73ab7c15fbda99f6fe46b4 not found: ID does not exist"
Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.833303 4838 scope.go:117] "RemoveContainer" containerID="893e42201bfbe73cbc9cfb9c341cca8133d393c5a8b0b83423989ee93225e855"
Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.942986 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-5tpqb"
Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.945375 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.953138 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.961974 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.985402 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ce26605-8dfc-48cd-a362-1a37c67ea300-config-data\") pod \"7ce26605-8dfc-48cd-a362-1a37c67ea300\" (UID: \"7ce26605-8dfc-48cd-a362-1a37c67ea300\") "
Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.985504 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ce26605-8dfc-48cd-a362-1a37c67ea300-scripts\") pod \"7ce26605-8dfc-48cd-a362-1a37c67ea300\" (UID: \"7ce26605-8dfc-48cd-a362-1a37c67ea300\") "
Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.985655 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bl4n2\" (UniqueName: \"kubernetes.io/projected/7ce26605-8dfc-48cd-a362-1a37c67ea300-kube-api-access-bl4n2\") pod \"7ce26605-8dfc-48cd-a362-1a37c67ea300\" (UID: \"7ce26605-8dfc-48cd-a362-1a37c67ea300\") "
Feb 02 11:19:49 crc kubenswrapper[4838]: I0202 11:19:49.985754 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ce26605-8dfc-48cd-a362-1a37c67ea300-combined-ca-bundle\") pod \"7ce26605-8dfc-48cd-a362-1a37c67ea300\" (UID: \"7ce26605-8dfc-48cd-a362-1a37c67ea300\") "
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.019432 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Feb 02 11:19:50 crc kubenswrapper[4838]: E0202 11:19:50.019933 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ce26605-8dfc-48cd-a362-1a37c67ea300" containerName="nova-cell1-conductor-db-sync"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.019952 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ce26605-8dfc-48cd-a362-1a37c67ea300" containerName="nova-cell1-conductor-db-sync"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.038681 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ce26605-8dfc-48cd-a362-1a37c67ea300" containerName="nova-cell1-conductor-db-sync"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.039980 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.049041 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.072562 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ce26605-8dfc-48cd-a362-1a37c67ea300-kube-api-access-bl4n2" (OuterVolumeSpecName: "kube-api-access-bl4n2") pod "7ce26605-8dfc-48cd-a362-1a37c67ea300" (UID: "7ce26605-8dfc-48cd-a362-1a37c67ea300"). InnerVolumeSpecName "kube-api-access-bl4n2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.072818 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ce26605-8dfc-48cd-a362-1a37c67ea300-scripts" (OuterVolumeSpecName: "scripts") pod "7ce26605-8dfc-48cd-a362-1a37c67ea300" (UID: "7ce26605-8dfc-48cd-a362-1a37c67ea300"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.094883 4838 scope.go:117] "RemoveContainer" containerID="7e3f317c1e241798f96f230f8ba9eba56e2746b06de7e68b8bfd755058c0e297"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.096648 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/098dda3e-e72b-47f4-a2b4-b213b2710990-config-data\") pod \"nova-api-0\" (UID: \"098dda3e-e72b-47f4-a2b4-b213b2710990\") " pod="openstack/nova-api-0"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.096782 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/098dda3e-e72b-47f4-a2b4-b213b2710990-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"098dda3e-e72b-47f4-a2b4-b213b2710990\") " pod="openstack/nova-api-0"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.096825 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/098dda3e-e72b-47f4-a2b4-b213b2710990-logs\") pod \"nova-api-0\" (UID: \"098dda3e-e72b-47f4-a2b4-b213b2710990\") " pod="openstack/nova-api-0"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.096963 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qlwm\" (UniqueName: \"kubernetes.io/projected/098dda3e-e72b-47f4-a2b4-b213b2710990-kube-api-access-8qlwm\") pod \"nova-api-0\" (UID: \"098dda3e-e72b-47f4-a2b4-b213b2710990\") " pod="openstack/nova-api-0"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.097031 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bl4n2\" (UniqueName: \"kubernetes.io/projected/7ce26605-8dfc-48cd-a362-1a37c67ea300-kube-api-access-bl4n2\") on node \"crc\" DevicePath \"\""
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.097049 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ce26605-8dfc-48cd-a362-1a37c67ea300-scripts\") on node \"crc\" DevicePath \"\""
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.104029 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.192823 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ce26605-8dfc-48cd-a362-1a37c67ea300-config-data" (OuterVolumeSpecName: "config-data") pod "7ce26605-8dfc-48cd-a362-1a37c67ea300" (UID: "7ce26605-8dfc-48cd-a362-1a37c67ea300"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.198804 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qlwm\" (UniqueName: \"kubernetes.io/projected/098dda3e-e72b-47f4-a2b4-b213b2710990-kube-api-access-8qlwm\") pod \"nova-api-0\" (UID: \"098dda3e-e72b-47f4-a2b4-b213b2710990\") " pod="openstack/nova-api-0"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.198913 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/098dda3e-e72b-47f4-a2b4-b213b2710990-config-data\") pod \"nova-api-0\" (UID: \"098dda3e-e72b-47f4-a2b4-b213b2710990\") " pod="openstack/nova-api-0"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.199024 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/098dda3e-e72b-47f4-a2b4-b213b2710990-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"098dda3e-e72b-47f4-a2b4-b213b2710990\") " pod="openstack/nova-api-0"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.199060 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/098dda3e-e72b-47f4-a2b4-b213b2710990-logs\") pod \"nova-api-0\" (UID: \"098dda3e-e72b-47f4-a2b4-b213b2710990\") " pod="openstack/nova-api-0"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.199193 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ce26605-8dfc-48cd-a362-1a37c67ea300-config-data\") on node \"crc\" DevicePath \"\""
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.199630 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/098dda3e-e72b-47f4-a2b4-b213b2710990-logs\") pod \"nova-api-0\" (UID: \"098dda3e-e72b-47f4-a2b4-b213b2710990\") " pod="openstack/nova-api-0"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.213573 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/098dda3e-e72b-47f4-a2b4-b213b2710990-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"098dda3e-e72b-47f4-a2b4-b213b2710990\") " pod="openstack/nova-api-0"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.217382 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/098dda3e-e72b-47f4-a2b4-b213b2710990-config-data\") pod \"nova-api-0\" (UID: \"098dda3e-e72b-47f4-a2b4-b213b2710990\") " pod="openstack/nova-api-0"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.221835 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ce26605-8dfc-48cd-a362-1a37c67ea300-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7ce26605-8dfc-48cd-a362-1a37c67ea300" (UID: "7ce26605-8dfc-48cd-a362-1a37c67ea300"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.227142 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qlwm\" (UniqueName: \"kubernetes.io/projected/098dda3e-e72b-47f4-a2b4-b213b2710990-kube-api-access-8qlwm\") pod \"nova-api-0\" (UID: \"098dda3e-e72b-47f4-a2b4-b213b2710990\") " pod="openstack/nova-api-0"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.234183 4838 scope.go:117] "RemoveContainer" containerID="893e42201bfbe73cbc9cfb9c341cca8133d393c5a8b0b83423989ee93225e855"
Feb 02 11:19:50 crc kubenswrapper[4838]: E0202 11:19:50.234632 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"893e42201bfbe73cbc9cfb9c341cca8133d393c5a8b0b83423989ee93225e855\": container with ID starting with 893e42201bfbe73cbc9cfb9c341cca8133d393c5a8b0b83423989ee93225e855 not found: ID does not exist" containerID="893e42201bfbe73cbc9cfb9c341cca8133d393c5a8b0b83423989ee93225e855"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.234662 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"893e42201bfbe73cbc9cfb9c341cca8133d393c5a8b0b83423989ee93225e855"} err="failed to get container status \"893e42201bfbe73cbc9cfb9c341cca8133d393c5a8b0b83423989ee93225e855\": rpc error: code = NotFound desc = could not find container \"893e42201bfbe73cbc9cfb9c341cca8133d393c5a8b0b83423989ee93225e855\": container with ID starting with 893e42201bfbe73cbc9cfb9c341cca8133d393c5a8b0b83423989ee93225e855 not found: ID does not exist"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.234683 4838 scope.go:117] "RemoveContainer" containerID="7e3f317c1e241798f96f230f8ba9eba56e2746b06de7e68b8bfd755058c0e297"
Feb 02 11:19:50 crc kubenswrapper[4838]: E0202 11:19:50.237483 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e3f317c1e241798f96f230f8ba9eba56e2746b06de7e68b8bfd755058c0e297\": container with ID starting with 7e3f317c1e241798f96f230f8ba9eba56e2746b06de7e68b8bfd755058c0e297 not found: ID does not exist" containerID="7e3f317c1e241798f96f230f8ba9eba56e2746b06de7e68b8bfd755058c0e297"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.237512 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e3f317c1e241798f96f230f8ba9eba56e2746b06de7e68b8bfd755058c0e297"} err="failed to get container status \"7e3f317c1e241798f96f230f8ba9eba56e2746b06de7e68b8bfd755058c0e297\": rpc error: code = NotFound desc = could not find container \"7e3f317c1e241798f96f230f8ba9eba56e2746b06de7e68b8bfd755058c0e297\": container with ID starting with 7e3f317c1e241798f96f230f8ba9eba56e2746b06de7e68b8bfd755058c0e297 not found: ID does not exist"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.237530 4838 scope.go:117] "RemoveContainer" containerID="055ed90fce55d1295e3df2c4f7224b7f2ef165eba0200ab8cb13afa92a4cd6d0"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.301001 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ce26605-8dfc-48cd-a362-1a37c67ea300-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.371780 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.418958 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.437196 4838 generic.go:334] "Generic (PLEG): container finished" podID="85cf2dd0-e8d9-4eba-a280-dba74e08cb91" containerID="625e818ba91644b17531ae65cf32edd2bc9cd2dec981b20e52fc83131711f658" exitCode=0
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.437268 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"85cf2dd0-e8d9-4eba-a280-dba74e08cb91","Type":"ContainerDied","Data":"625e818ba91644b17531ae65cf32edd2bc9cd2dec981b20e52fc83131711f658"}
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.448450 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cf592065-13f1-4594-9642-6f7c039c42ad","Type":"ContainerStarted","Data":"bd2f1cd35141bdbd92bb940bd14c53131e1c96a6453b532363b73553b1eba69e"}
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.448509 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cf592065-13f1-4594-9642-6f7c039c42ad","Type":"ContainerStarted","Data":"5e548a758dcbbe3cc84622c561defaca4c28e6f678650f124f8185db5d79be8f"}
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.490774 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-5tpqb" event={"ID":"7ce26605-8dfc-48cd-a362-1a37c67ea300","Type":"ContainerDied","Data":"356dfba6041eaa50822378c9d084733e611dc746e56790bfaab83dd3c22ee6da"}
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.490817 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="356dfba6041eaa50822378c9d084733e611dc746e56790bfaab83dd3c22ee6da"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.490917 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-5tpqb"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.555796 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.555772788 podStartE2EDuration="2.555772788s" podCreationTimestamp="2026-02-02 11:19:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:19:50.491399374 +0000 UTC m=+1584.828500422" watchObservedRunningTime="2026-02-02 11:19:50.555772788 +0000 UTC m=+1584.892873826"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.609680 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98e7b3ca-1138-4df5-a2c4-f1b03b4352bb" path="/var/lib/kubelet/pods/98e7b3ca-1138-4df5-a2c4-f1b03b4352bb/volumes"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.610966 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c15c4499-e01e-474c-b653-efaf4af4c881" path="/var/lib/kubelet/pods/c15c4499-e01e-474c-b653-efaf4af4c881/volumes"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.614170 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7e894c9-38fb-4616-ab4b-68166a67b5b5" path="/var/lib/kubelet/pods/d7e894c9-38fb-4616-ab4b-68166a67b5b5/volumes"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.622642 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"]
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.624672 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.624770 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.627954 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.732313 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/966289ba-cb66-4cf4-adff-45ac19b18add-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"966289ba-cb66-4cf4-adff-45ac19b18add\") " pod="openstack/nova-cell1-conductor-0"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.732844 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/966289ba-cb66-4cf4-adff-45ac19b18add-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"966289ba-cb66-4cf4-adff-45ac19b18add\") " pod="openstack/nova-cell1-conductor-0"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.734563 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7l8jq\" (UniqueName: \"kubernetes.io/projected/966289ba-cb66-4cf4-adff-45ac19b18add-kube-api-access-7l8jq\") pod \"nova-cell1-conductor-0\" (UID: \"966289ba-cb66-4cf4-adff-45ac19b18add\") " pod="openstack/nova-cell1-conductor-0"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.755690 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.835953 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fpftv\" (UniqueName: \"kubernetes.io/projected/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-kube-api-access-fpftv\") pod \"85cf2dd0-e8d9-4eba-a280-dba74e08cb91\" (UID: \"85cf2dd0-e8d9-4eba-a280-dba74e08cb91\") "
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.836062 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-logs\") pod \"85cf2dd0-e8d9-4eba-a280-dba74e08cb91\" (UID: \"85cf2dd0-e8d9-4eba-a280-dba74e08cb91\") "
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.836149 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-nova-metadata-tls-certs\") pod \"85cf2dd0-e8d9-4eba-a280-dba74e08cb91\" (UID: \"85cf2dd0-e8d9-4eba-a280-dba74e08cb91\") "
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.836251 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-config-data\") pod \"85cf2dd0-e8d9-4eba-a280-dba74e08cb91\" (UID: \"85cf2dd0-e8d9-4eba-a280-dba74e08cb91\") "
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.836417 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-combined-ca-bundle\") pod \"85cf2dd0-e8d9-4eba-a280-dba74e08cb91\" (UID: \"85cf2dd0-e8d9-4eba-a280-dba74e08cb91\") "
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.836894 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7l8jq\" (UniqueName: \"kubernetes.io/projected/966289ba-cb66-4cf4-adff-45ac19b18add-kube-api-access-7l8jq\") pod \"nova-cell1-conductor-0\" (UID: \"966289ba-cb66-4cf4-adff-45ac19b18add\") " pod="openstack/nova-cell1-conductor-0"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.837030 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/966289ba-cb66-4cf4-adff-45ac19b18add-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"966289ba-cb66-4cf4-adff-45ac19b18add\") " pod="openstack/nova-cell1-conductor-0"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.837137 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/966289ba-cb66-4cf4-adff-45ac19b18add-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"966289ba-cb66-4cf4-adff-45ac19b18add\") " pod="openstack/nova-cell1-conductor-0"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.837482 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-logs" (OuterVolumeSpecName: "logs") pod "85cf2dd0-e8d9-4eba-a280-dba74e08cb91" (UID: "85cf2dd0-e8d9-4eba-a280-dba74e08cb91"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.854991 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/966289ba-cb66-4cf4-adff-45ac19b18add-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"966289ba-cb66-4cf4-adff-45ac19b18add\") " pod="openstack/nova-cell1-conductor-0"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.855342 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-kube-api-access-fpftv" (OuterVolumeSpecName: "kube-api-access-fpftv") pod "85cf2dd0-e8d9-4eba-a280-dba74e08cb91" (UID: "85cf2dd0-e8d9-4eba-a280-dba74e08cb91"). InnerVolumeSpecName "kube-api-access-fpftv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.856098 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.880445 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/966289ba-cb66-4cf4-adff-45ac19b18add-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"966289ba-cb66-4cf4-adff-45ac19b18add\") " pod="openstack/nova-cell1-conductor-0"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.882897 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "85cf2dd0-e8d9-4eba-a280-dba74e08cb91" (UID: "85cf2dd0-e8d9-4eba-a280-dba74e08cb91"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.896671 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7l8jq\" (UniqueName: \"kubernetes.io/projected/966289ba-cb66-4cf4-adff-45ac19b18add-kube-api-access-7l8jq\") pod \"nova-cell1-conductor-0\" (UID: \"966289ba-cb66-4cf4-adff-45ac19b18add\") " pod="openstack/nova-cell1-conductor-0"
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.900719 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-config-data" (OuterVolumeSpecName: "config-data") pod "85cf2dd0-e8d9-4eba-a280-dba74e08cb91" (UID: "85cf2dd0-e8d9-4eba-a280-dba74e08cb91"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.942630 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.942664 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpftv\" (UniqueName: \"kubernetes.io/projected/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-kube-api-access-fpftv\") on node \"crc\" DevicePath \"\""
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.942676 4838 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-logs\") on node \"crc\" DevicePath \"\""
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.942688 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-config-data\") on node \"crc\" DevicePath \"\""
Feb 02 11:19:50 crc kubenswrapper[4838]: I0202 11:19:50.942991 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "85cf2dd0-e8d9-4eba-a280-dba74e08cb91" (UID: "85cf2dd0-e8d9-4eba-a280-dba74e08cb91"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.044386 4838 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/85cf2dd0-e8d9-4eba-a280-dba74e08cb91-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\""
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.094599 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.140797 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.509710 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"29a3c9fb-a43f-4867-94e9-dd205f0fb517","Type":"ContainerStarted","Data":"df8cefcea7c72928f89803e160f8d21e7d7eaea0734a0e4ee348c3de2d187cb0"}
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.510248 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.510265 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"29a3c9fb-a43f-4867-94e9-dd205f0fb517","Type":"ContainerStarted","Data":"328a2aa28adb9877d052791766680acf6482e9d992365f739dec4274e150a6f9"}
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.517606 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"098dda3e-e72b-47f4-a2b4-b213b2710990","Type":"ContainerStarted","Data":"3fa6906531577063158d4d8b8af272bd626a8537209c53a7be293759eaa7e132"}
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.517675 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"098dda3e-e72b-47f4-a2b4-b213b2710990","Type":"ContainerStarted","Data":"020148b2e199fecc2bb3b6aed4645db960c678f5e5f0ab9057c27765e62f4e4d"}
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.539081 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844","Type":"ContainerStarted","Data":"e7bc439ed8e9d1593dcf92013fd934e691bbaa9f4d9fef74e8565d98a5486051"}
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.545707 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"85cf2dd0-e8d9-4eba-a280-dba74e08cb91","Type":"ContainerDied","Data":"23f1efff56dd44dfc964a04a7299588b1c2e05f0c4d0c344f71b570b5f70aaec"}
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.545922 4838 scope.go:117] "RemoveContainer" containerID="625e818ba91644b17531ae65cf32edd2bc9cd2dec981b20e52fc83131711f658"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.545821 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.552479 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.552434701 podStartE2EDuration="2.552434701s" podCreationTimestamp="2026-02-02 11:19:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:19:51.542863716 +0000 UTC m=+1585.879964754" watchObservedRunningTime="2026-02-02 11:19:51.552434701 +0000 UTC m=+1585.889535739"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.642437 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.664813 4838 scope.go:117] "RemoveContainer" containerID="ae017bc95b2af3f6e54879b85b24b475f068b97103df3a18f1ae0841a7fb1d43"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.681917 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.700064 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.717790 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Feb 02 11:19:51 crc kubenswrapper[4838]: E0202 11:19:51.718302 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85cf2dd0-e8d9-4eba-a280-dba74e08cb91" containerName="nova-metadata-log"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.718322 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="85cf2dd0-e8d9-4eba-a280-dba74e08cb91" containerName="nova-metadata-log"
Feb 02 11:19:51 crc kubenswrapper[4838]: E0202 11:19:51.718373 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85cf2dd0-e8d9-4eba-a280-dba74e08cb91" containerName="nova-metadata-metadata"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.718381 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="85cf2dd0-e8d9-4eba-a280-dba74e08cb91" containerName="nova-metadata-metadata"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.718682 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="85cf2dd0-e8d9-4eba-a280-dba74e08cb91" containerName="nova-metadata-metadata"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.718721 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="85cf2dd0-e8d9-4eba-a280-dba74e08cb91" containerName="nova-metadata-log"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.719955 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.722919 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.727911 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.745738 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.761747 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd9b0ea2-d076-43f8-87f9-6491b526d025-config-data\") pod \"nova-metadata-0\" (UID: \"cd9b0ea2-d076-43f8-87f9-6491b526d025\") " pod="openstack/nova-metadata-0"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.761786 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b0ea2-d076-43f8-87f9-6491b526d025-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cd9b0ea2-d076-43f8-87f9-6491b526d025\") " pod="openstack/nova-metadata-0"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.761854 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd9b0ea2-d076-43f8-87f9-6491b526d025-logs\") pod \"nova-metadata-0\" (UID: \"cd9b0ea2-d076-43f8-87f9-6491b526d025\") " pod="openstack/nova-metadata-0"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.761949 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd9b0ea2-d076-43f8-87f9-6491b526d025-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"cd9b0ea2-d076-43f8-87f9-6491b526d025\") " pod="openstack/nova-metadata-0"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.762045 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qk6xk\" (UniqueName: \"kubernetes.io/projected/cd9b0ea2-d076-43f8-87f9-6491b526d025-kube-api-access-qk6xk\") pod \"nova-metadata-0\" (UID: \"cd9b0ea2-d076-43f8-87f9-6491b526d025\") " pod="openstack/nova-metadata-0"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.863849 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qk6xk\" (UniqueName: \"kubernetes.io/projected/cd9b0ea2-d076-43f8-87f9-6491b526d025-kube-api-access-qk6xk\") pod \"nova-metadata-0\" (UID: \"cd9b0ea2-d076-43f8-87f9-6491b526d025\") " pod="openstack/nova-metadata-0"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.864174 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd9b0ea2-d076-43f8-87f9-6491b526d025-config-data\") pod \"nova-metadata-0\" (UID: \"cd9b0ea2-d076-43f8-87f9-6491b526d025\") " pod="openstack/nova-metadata-0"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.864261 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b0ea2-d076-43f8-87f9-6491b526d025-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cd9b0ea2-d076-43f8-87f9-6491b526d025\") " pod="openstack/nova-metadata-0"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.864430 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd9b0ea2-d076-43f8-87f9-6491b526d025-logs\") pod \"nova-metadata-0\" (UID: \"cd9b0ea2-d076-43f8-87f9-6491b526d025\") " pod="openstack/nova-metadata-0"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.864590 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd9b0ea2-d076-43f8-87f9-6491b526d025-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"cd9b0ea2-d076-43f8-87f9-6491b526d025\") " pod="openstack/nova-metadata-0"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.865269 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd9b0ea2-d076-43f8-87f9-6491b526d025-logs\") pod \"nova-metadata-0\" (UID: \"cd9b0ea2-d076-43f8-87f9-6491b526d025\") " pod="openstack/nova-metadata-0"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.870816 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd9b0ea2-d076-43f8-87f9-6491b526d025-config-data\") pod \"nova-metadata-0\" (UID: \"cd9b0ea2-d076-43f8-87f9-6491b526d025\") " pod="openstack/nova-metadata-0"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.873248 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd9b0ea2-d076-43f8-87f9-6491b526d025-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"cd9b0ea2-d076-43f8-87f9-6491b526d025\") " pod="openstack/nova-metadata-0"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.873574 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b0ea2-d076-43f8-87f9-6491b526d025-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cd9b0ea2-d076-43f8-87f9-6491b526d025\") " pod="openstack/nova-metadata-0"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.901217 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qk6xk\" (UniqueName: \"kubernetes.io/projected/cd9b0ea2-d076-43f8-87f9-6491b526d025-kube-api-access-qk6xk\") pod \"nova-metadata-0\" (UID: \"cd9b0ea2-d076-43f8-87f9-6491b526d025\") " pod="openstack/nova-metadata-0"
Feb 02 11:19:51 crc kubenswrapper[4838]: I0202 11:19:51.912098 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Feb 02 11:19:51 crc kubenswrapper[4838]: E0202 11:19:51.997287 4838 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/5f2fc33b4b8ba4dda490cfd3bd6f82eb1fc7bdb1f25ca80ace8378bcf2e72bf1/diff" to get inode usage: stat /var/lib/containers/storage/overlay/5f2fc33b4b8ba4dda490cfd3bd6f82eb1fc7bdb1f25ca80ace8378bcf2e72bf1/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack_nova-cell0-conductor-0_c15c4499-e01e-474c-b653-efaf4af4c881/nova-cell0-conductor-conductor/0.log" to get inode usage: stat /var/log/pods/openstack_nova-cell0-conductor-0_c15c4499-e01e-474c-b653-efaf4af4c881/nova-cell0-conductor-conductor/0.log: no such file or directory
Feb 02 11:19:52 crc kubenswrapper[4838]: I0202 11:19:52.524949 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85cf2dd0-e8d9-4eba-a280-dba74e08cb91" path="/var/lib/kubelet/pods/85cf2dd0-e8d9-4eba-a280-dba74e08cb91/volumes"
Feb 02 11:19:52 crc kubenswrapper[4838]: I0202 11:19:52.567464 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"098dda3e-e72b-47f4-a2b4-b213b2710990","Type":"ContainerStarted","Data":"428e64d6af02a52beee4311b8cf4444fdd2c078430cc23a25c8d145a929b03a2"}
Feb 02 11:19:52 crc kubenswrapper[4838]: I0202 11:19:52.582017 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Feb 02 11:19:52 crc kubenswrapper[4838]: I0202 11:19:52.584284 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844","Type":"ContainerStarted","Data":"6273287249d78e66e69e3090ecb23467d3a6564d67bca5a537a33286cb95af24"}
Feb 02 11:19:52 crc kubenswrapper[4838]: I0202 11:19:52.591738 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"966289ba-cb66-4cf4-adff-45ac19b18add","Type":"ContainerStarted","Data":"a244a56fef6ac73187d6203d96b5122f8413a82a00a7933e6cdb3bc43828aa53"}
Feb 02 11:19:52 crc kubenswrapper[4838]: I0202 11:19:52.591857 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0"
Feb 02 11:19:52 crc kubenswrapper[4838]: I0202 11:19:52.591877 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"966289ba-cb66-4cf4-adff-45ac19b18add","Type":"ContainerStarted","Data":"15b2629cee50a8b8c8e8bb86521b52635077c5de23553a7a2b618105a1c79073"}
Feb 02 11:19:52 crc kubenswrapper[4838]: I0202 11:19:52.597451 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.59742462 podStartE2EDuration="3.59742462s" podCreationTimestamp="2026-02-02 11:19:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:19:52.586504929 +0000 UTC m=+1586.923605967" watchObservedRunningTime="2026-02-02 11:19:52.59742462 +0000 UTC m=+1586.934525678"
Feb 02 11:19:52 crc kubenswrapper[4838]: I0202 11:19:52.632731 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.63270785 podStartE2EDuration="2.63270785s" podCreationTimestamp="2026-02-02 11:19:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:19:52.624395488 +0000 UTC m=+1586.961496516" watchObservedRunningTime="2026-02-02 11:19:52.63270785 +0000 UTC m=+1586.969808888"
Feb 02 11:19:53 crc kubenswrapper[4838]: I0202 11:19:53.599508 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844","Type":"ContainerStarted","Data":"9dce7b9504df09cd7e5d4028c315aaf338b1b59e11bcacc59433b30faf9f4adf"}
Feb 02 11:19:53 crc kubenswrapper[4838]: I0202 11:19:53.604756 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cd9b0ea2-d076-43f8-87f9-6491b526d025","Type":"ContainerStarted","Data":"d53be724496dbbd590059689260af49ff69e481140da43f2dea56654f613dc4a"}
Feb 02 11:19:53 crc kubenswrapper[4838]: I0202 11:19:53.604816 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cd9b0ea2-d076-43f8-87f9-6491b526d025","Type":"ContainerStarted","Data":"ba2b37934b1e825f87fd76dafdbc49dbf642370474af8bc236f4d85e3fb26e1a"}
Feb 02 11:19:53 crc kubenswrapper[4838]: I0202 11:19:53.604831 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cd9b0ea2-d076-43f8-87f9-6491b526d025","Type":"ContainerStarted","Data":"2a7abc64fa6c9bfc7fd7318477fb07b58b5a598242fdda0d24dfbddd6cfbefea"}
Feb 02 11:19:53 crc kubenswrapper[4838]: I0202 11:19:53.629407 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.629388612 podStartE2EDuration="2.629388612s" podCreationTimestamp="2026-02-02 11:19:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:19:53.626911626 +0000 UTC m=+1587.964012664" watchObservedRunningTime="2026-02-02 11:19:53.629388612 +0000 UTC m=+1587.966489640"
Feb 02 11:19:53 crc kubenswrapper[4838]: W0202 11:19:53.647114 4838 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81309430_b454_4c48_95da_f3dbed0ad937.slice/crio-88feb5fd9b891083958fe7aaef1505b23186a81da36a3676fbe0c4b92c34f961.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81309430_b454_4c48_95da_f3dbed0ad937.slice/crio-88feb5fd9b891083958fe7aaef1505b23186a81da36a3676fbe0c4b92c34f961.scope: no such file or directory
Feb 02 11:19:53 crc kubenswrapper[4838]: W0202 11:19:53.647216 4838 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda451fb50_63dc_4dcc_8cf0_37e7f3d99888.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda451fb50_63dc_4dcc_8cf0_37e7f3d99888.slice: no such file or directory
Feb 02 11:19:53 crc kubenswrapper[4838]: W0202 11:19:53.647272 4838 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod98e7b3ca_1138_4df5_a2c4_f1b03b4352bb.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod98e7b3ca_1138_4df5_a2c4_f1b03b4352bb.slice: no such file or directory
Feb 02 11:19:53 crc kubenswrapper[4838]: W0202 11:19:53.647847 4838 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd7e894c9_38fb_4616_ab4b_68166a67b5b5.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd7e894c9_38fb_4616_ab4b_68166a67b5b5.slice: no such file or directory
Feb 02 11:19:53 crc kubenswrapper[4838]: W0202 11:19:53.647872 4838 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85cf2dd0_e8d9_4eba_a280_dba74e08cb91.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85cf2dd0_e8d9_4eba_a280_dba74e08cb91.slice: no such file or directory
Feb 02 11:19:53 crc kubenswrapper[4838]: W0202 11:19:53.649351 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81309430_b454_4c48_95da_f3dbed0ad937.slice/crio-02d6c25e0687a436d3f24eebf3c27c8a62c3e9f0297686c2354ec69403eee35e WatchSource:0}: Error finding container 02d6c25e0687a436d3f24eebf3c27c8a62c3e9f0297686c2354ec69403eee35e: Status 404 returned error can't find the container with id 02d6c25e0687a436d3f24eebf3c27c8a62c3e9f0297686c2354ec69403eee35e
Feb 02 11:19:53 crc kubenswrapper[4838]: W0202 11:19:53.650301 4838 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81309430_b454_4c48_95da_f3dbed0ad937.slice/crio-conmon-a81d6b991b50a8abcd15e12c426be988a9c2fb0dba0aa6c3f8ee47116aedb1cd.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81309430_b454_4c48_95da_f3dbed0ad937.slice/crio-conmon-a81d6b991b50a8abcd15e12c426be988a9c2fb0dba0aa6c3f8ee47116aedb1cd.scope: no such file or directory
Feb 02 11:19:53 crc kubenswrapper[4838]: W0202 11:19:53.650336 4838 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81309430_b454_4c48_95da_f3dbed0ad937.slice/crio-a81d6b991b50a8abcd15e12c426be988a9c2fb0dba0aa6c3f8ee47116aedb1cd.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81309430_b454_4c48_95da_f3dbed0ad937.slice/crio-a81d6b991b50a8abcd15e12c426be988a9c2fb0dba0aa6c3f8ee47116aedb1cd.scope: no such file or directory
Feb 02 11:19:53 crc kubenswrapper[4838]: W0202 11:19:53.651226 4838 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81309430_b454_4c48_95da_f3dbed0ad937.slice/crio-conmon-e2f33d2573c5cb2fdef9776a6bac12f17737182effda706f908a89d82eca1ed3.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81309430_b454_4c48_95da_f3dbed0ad937.slice/crio-conmon-e2f33d2573c5cb2fdef9776a6bac12f17737182effda706f908a89d82eca1ed3.scope: no such file or directory
Feb 02 11:19:53 crc kubenswrapper[4838]: W0202 11:19:53.651295 4838 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81309430_b454_4c48_95da_f3dbed0ad937.slice/crio-e2f33d2573c5cb2fdef9776a6bac12f17737182effda706f908a89d82eca1ed3.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81309430_b454_4c48_95da_f3dbed0ad937.slice/crio-e2f33d2573c5cb2fdef9776a6bac12f17737182effda706f908a89d82eca1ed3.scope: no such file or directory
Feb 02 11:19:53 crc kubenswrapper[4838]: E0202 11:19:53.657105 4838 manager.go:1116] Failed to create existing container: /kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod57bf0b31_2ff8_44ad_a509_be8868430dd2.slice/crio-c9abda5bfd3c38cb8ce3f3904188986a22cbb8dabd7ceb51b651ade538eed826: Error finding container c9abda5bfd3c38cb8ce3f3904188986a22cbb8dabd7ceb51b651ade538eed826: Status 404 returned error can't find the container with id c9abda5bfd3c38cb8ce3f3904188986a22cbb8dabd7ceb51b651ade538eed826
Feb 02 11:19:53 crc kubenswrapper[4838]: E0202 11:19:53.661109 4838 manager.go:1116] Failed to create existing container: /kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod232b1b0a_b35d_4834_a4bb_3a1ed7de6f71.slice/crio-adddfa0a3b712e2ee614e717c4aa9923456562be9403be804d30246b1c61579f: Error finding container adddfa0a3b712e2ee614e717c4aa9923456562be9403be804d30246b1c61579f: Status 404 returned error can't find the container with id adddfa0a3b712e2ee614e717c4aa9923456562be9403be804d30246b1c61579f
Feb 02 11:19:53 crc kubenswrapper[4838]: E0202 11:19:53.661321 4838 manager.go:1116] Failed to create existing container: /kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf97c9e6f_a864_4903_b775_0cef0afed268.slice/crio-9ea1b9d4ee7edbd1db5e014b145cc700a409a72d3e718899e8b31f0676d868d7: Error finding container 9ea1b9d4ee7edbd1db5e014b145cc700a409a72d3e718899e8b31f0676d868d7: Status 404 returned error can't find the container with id 9ea1b9d4ee7edbd1db5e014b145cc700a409a72d3e718899e8b31f0676d868d7
Feb 02 11:19:53 crc kubenswrapper[4838]: I0202 11:19:53.758552 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Feb 02 11:19:53 crc kubenswrapper[4838]: E0202 11:19:53.973541 4838 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc15c4499_e01e_474c_b653_efaf4af4c881.slice/crio-c7b210765ba5020ed5046659450c59367e9a0045e3dd03e1b0081661fd65dfe2\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod232b1b0a_b35d_4834_a4bb_3a1ed7de6f71.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81309430_b454_4c48_95da_f3dbed0ad937.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ce26605_8dfc_48cd_a362_1a37c67ea300.slice/crio-356dfba6041eaa50822378c9d084733e611dc746e56790bfaab83dd3c22ee6da\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod97a5f67e_673d_47c3_826d_75f217906282.slice/crio-conmon-458c7590961d20f9c73148037bc4e0fdfac81792313d161742c1225e964a45db.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ce26605_8dfc_48cd_a362_1a37c67ea300.slice/crio-conmon-4823bcd344d625ad561c7b904f665bccc6207c62fe0a225cf8d050bc4a5c40fd.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod232b1b0a_b35d_4834_a4bb_3a1ed7de6f71.slice/crio-010eaa33f02c4787fa4116a23dc7014dbc08122d9a29861bd4c02497dc2f3fa3.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod97a5f67e_673d_47c3_826d_75f217906282.slice/crio-458c7590961d20f9c73148037bc4e0fdfac81792313d161742c1225e964a45db.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ce26605_8dfc_48cd_a362_1a37c67ea300.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc15c4499_e01e_474c_b653_efaf4af4c881.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc15c4499_e01e_474c_b653_efaf4af4c881.slice/crio-055ed90fce55d1295e3df2c4f7224b7f2ef165eba0200ab8cb13afa92a4cd6d0.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod232b1b0a_b35d_4834_a4bb_3a1ed7de6f71.slice/crio-conmon-010eaa33f02c4787fa4116a23dc7014dbc08122d9a29861bd4c02497dc2f3fa3.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ce26605_8dfc_48cd_a362_1a37c67ea300.slice/crio-4823bcd344d625ad561c7b904f665bccc6207c62fe0a225cf8d050bc4a5c40fd.scope\": RecentStats: unable to find data in memory cache]"
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.296136 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.459362 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97a5f67e-673d-47c3-826d-75f217906282-combined-ca-bundle\") pod \"97a5f67e-673d-47c3-826d-75f217906282\" (UID: \"97a5f67e-673d-47c3-826d-75f217906282\") "
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.459435 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nfrj7\" (UniqueName: \"kubernetes.io/projected/97a5f67e-673d-47c3-826d-75f217906282-kube-api-access-nfrj7\") pod \"97a5f67e-673d-47c3-826d-75f217906282\" (UID: \"97a5f67e-673d-47c3-826d-75f217906282\") "
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.459533 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97a5f67e-673d-47c3-826d-75f217906282-config-data\") pod \"97a5f67e-673d-47c3-826d-75f217906282\" (UID: \"97a5f67e-673d-47c3-826d-75f217906282\") "
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.468901 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97a5f67e-673d-47c3-826d-75f217906282-kube-api-access-nfrj7" (OuterVolumeSpecName: "kube-api-access-nfrj7") pod "97a5f67e-673d-47c3-826d-75f217906282" (UID: "97a5f67e-673d-47c3-826d-75f217906282"). InnerVolumeSpecName "kube-api-access-nfrj7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.503219 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97a5f67e-673d-47c3-826d-75f217906282-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "97a5f67e-673d-47c3-826d-75f217906282" (UID: "97a5f67e-673d-47c3-826d-75f217906282"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.544085 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97a5f67e-673d-47c3-826d-75f217906282-config-data" (OuterVolumeSpecName: "config-data") pod "97a5f67e-673d-47c3-826d-75f217906282" (UID: "97a5f67e-673d-47c3-826d-75f217906282"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.563021 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97a5f67e-673d-47c3-826d-75f217906282-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.563066 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nfrj7\" (UniqueName: \"kubernetes.io/projected/97a5f67e-673d-47c3-826d-75f217906282-kube-api-access-nfrj7\") on node \"crc\" DevicePath \"\""
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.563079 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97a5f67e-673d-47c3-826d-75f217906282-config-data\") on node \"crc\" DevicePath \"\""
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.620055 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844","Type":"ContainerStarted","Data":"2cd555b5cefee893aed1fed93bd0885d7e6eb91778f7d5f88561925443d3b326"}
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.623694 4838 generic.go:334] "Generic (PLEG): container finished" podID="97a5f67e-673d-47c3-826d-75f217906282" containerID="458c7590961d20f9c73148037bc4e0fdfac81792313d161742c1225e964a45db" exitCode=137
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.625302 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.625974 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"97a5f67e-673d-47c3-826d-75f217906282","Type":"ContainerDied","Data":"458c7590961d20f9c73148037bc4e0fdfac81792313d161742c1225e964a45db"}
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.626005 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"97a5f67e-673d-47c3-826d-75f217906282","Type":"ContainerDied","Data":"288dc7f2953eaaa631b4d98e29bfdaaf6987c69f5cbf155c4f708d0a9147db2d"}
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.626028 4838 scope.go:117] "RemoveContainer" containerID="458c7590961d20f9c73148037bc4e0fdfac81792313d161742c1225e964a45db"
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.657906 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.671728 4838 scope.go:117] "RemoveContainer" containerID="458c7590961d20f9c73148037bc4e0fdfac81792313d161742c1225e964a45db"
Feb 02 11:19:54 crc kubenswrapper[4838]: E0202 11:19:54.678092 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"458c7590961d20f9c73148037bc4e0fdfac81792313d161742c1225e964a45db\": container with ID starting with 458c7590961d20f9c73148037bc4e0fdfac81792313d161742c1225e964a45db not found: ID does not exist" containerID="458c7590961d20f9c73148037bc4e0fdfac81792313d161742c1225e964a45db"
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.678164 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"458c7590961d20f9c73148037bc4e0fdfac81792313d161742c1225e964a45db"} err="failed to get container status \"458c7590961d20f9c73148037bc4e0fdfac81792313d161742c1225e964a45db\": rpc error: code = NotFound desc = could not find container \"458c7590961d20f9c73148037bc4e0fdfac81792313d161742c1225e964a45db\": container with ID starting with 458c7590961d20f9c73148037bc4e0fdfac81792313d161742c1225e964a45db not found: ID does not exist"
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.687983 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.719715 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Feb 02 11:19:54 crc kubenswrapper[4838]: E0202 11:19:54.720213 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97a5f67e-673d-47c3-826d-75f217906282" containerName="nova-cell1-novncproxy-novncproxy"
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.720233 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="97a5f67e-673d-47c3-826d-75f217906282" containerName="nova-cell1-novncproxy-novncproxy"
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.720447 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="97a5f67e-673d-47c3-826d-75f217906282" containerName="nova-cell1-novncproxy-novncproxy"
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.721148 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.725146 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.725356 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc"
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.725682 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt"
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.738950 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.868208 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e38e111-e96a-4196-84d2-9f6f2cd192dc-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"7e38e111-e96a-4196-84d2-9f6f2cd192dc\") " pod="openstack/nova-cell1-novncproxy-0"
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.868584 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e38e111-e96a-4196-84d2-9f6f2cd192dc-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"7e38e111-e96a-4196-84d2-9f6f2cd192dc\") " pod="openstack/nova-cell1-novncproxy-0"
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.868827 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjwld\" (UniqueName: \"kubernetes.io/projected/7e38e111-e96a-4196-84d2-9f6f2cd192dc-kube-api-access-tjwld\") pod \"nova-cell1-novncproxy-0\" (UID: \"7e38e111-e96a-4196-84d2-9f6f2cd192dc\") " pod="openstack/nova-cell1-novncproxy-0"
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.868898 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e38e111-e96a-4196-84d2-9f6f2cd192dc-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"7e38e111-e96a-4196-84d2-9f6f2cd192dc\") " pod="openstack/nova-cell1-novncproxy-0"
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.868952 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e38e111-e96a-4196-84d2-9f6f2cd192dc-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"7e38e111-e96a-4196-84d2-9f6f2cd192dc\") " pod="openstack/nova-cell1-novncproxy-0"
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.970480 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e38e111-e96a-4196-84d2-9f6f2cd192dc-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"7e38e111-e96a-4196-84d2-9f6f2cd192dc\") " pod="openstack/nova-cell1-novncproxy-0"
Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.970602 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjwld\" (UniqueName: \"kubernetes.io/projected/7e38e111-e96a-4196-84d2-9f6f2cd192dc-kube-api-access-tjwld\") pod \"nova-cell1-novncproxy-0\" (UID: \"7e38e111-e96a-4196-84d2-9f6f2cd192dc\") " pod="openstack/nova-cell1-novncproxy-0"
Feb
02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.970654 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e38e111-e96a-4196-84d2-9f6f2cd192dc-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"7e38e111-e96a-4196-84d2-9f6f2cd192dc\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.970707 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e38e111-e96a-4196-84d2-9f6f2cd192dc-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"7e38e111-e96a-4196-84d2-9f6f2cd192dc\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.970822 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e38e111-e96a-4196-84d2-9f6f2cd192dc-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"7e38e111-e96a-4196-84d2-9f6f2cd192dc\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.987417 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e38e111-e96a-4196-84d2-9f6f2cd192dc-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"7e38e111-e96a-4196-84d2-9f6f2cd192dc\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.988110 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e38e111-e96a-4196-84d2-9f6f2cd192dc-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"7e38e111-e96a-4196-84d2-9f6f2cd192dc\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.988293 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e38e111-e96a-4196-84d2-9f6f2cd192dc-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"7e38e111-e96a-4196-84d2-9f6f2cd192dc\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.990869 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e38e111-e96a-4196-84d2-9f6f2cd192dc-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"7e38e111-e96a-4196-84d2-9f6f2cd192dc\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 11:19:54 crc kubenswrapper[4838]: I0202 11:19:54.992875 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjwld\" (UniqueName: \"kubernetes.io/projected/7e38e111-e96a-4196-84d2-9f6f2cd192dc-kube-api-access-tjwld\") pod \"nova-cell1-novncproxy-0\" (UID: \"7e38e111-e96a-4196-84d2-9f6f2cd192dc\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 11:19:55 crc kubenswrapper[4838]: I0202 11:19:55.047398 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 02 11:19:55 crc kubenswrapper[4838]: I0202 11:19:55.705108 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 11:19:56 crc kubenswrapper[4838]: I0202 11:19:56.533847 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97a5f67e-673d-47c3-826d-75f217906282" path="/var/lib/kubelet/pods/97a5f67e-673d-47c3-826d-75f217906282/volumes" Feb 02 11:19:56 crc kubenswrapper[4838]: I0202 11:19:56.647518 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"7e38e111-e96a-4196-84d2-9f6f2cd192dc","Type":"ContainerStarted","Data":"88148ee114de2d550fbae5c57529d4da3ea4697422df2912176859f02533c419"} Feb 02 11:19:56 crc kubenswrapper[4838]: I0202 11:19:56.647577 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"7e38e111-e96a-4196-84d2-9f6f2cd192dc","Type":"ContainerStarted","Data":"d2bed3812d963ff41be4f43e62ed7b77de445946fafc18514208eb785a09584e"} Feb 02 11:19:56 crc kubenswrapper[4838]: I0202 11:19:56.672519 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.672496534 podStartE2EDuration="2.672496534s" podCreationTimestamp="2026-02-02 11:19:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:19:56.6648415 +0000 UTC m=+1591.001942528" watchObservedRunningTime="2026-02-02 11:19:56.672496534 +0000 UTC m=+1591.009597562" Feb 02 11:19:56 crc kubenswrapper[4838]: I0202 11:19:56.913321 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 02 11:19:56 crc kubenswrapper[4838]: I0202 11:19:56.913387 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 02 11:19:57 crc kubenswrapper[4838]: I0202 11:19:57.663220 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844","Type":"ContainerStarted","Data":"f9e793113d17cc6cf2ed43d46c0e13f1ccfd3131091aefaa2512f6235f884bb4"} Feb 02 11:19:57 crc kubenswrapper[4838]: I0202 11:19:57.703107 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.038654688 podStartE2EDuration="8.703087119s" podCreationTimestamp="2026-02-02 11:19:49 +0000 UTC" firstStartedPulling="2026-02-02 11:19:50.878612576 +0000 UTC m=+1585.215713604" lastFinishedPulling="2026-02-02 11:19:56.543045007 +0000 UTC m=+1590.880146035" observedRunningTime="2026-02-02 11:19:57.691771368 +0000 UTC m=+1592.028872406" watchObservedRunningTime="2026-02-02 11:19:57.703087119 +0000 UTC m=+1592.040188167" Feb 02 11:19:58 crc kubenswrapper[4838]: I0202 11:19:58.677840 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 11:19:58 crc kubenswrapper[4838]: I0202 11:19:58.758266 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Feb 02 11:19:58 crc kubenswrapper[4838]: I0202 11:19:58.843330 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Feb 02 11:19:59 crc kubenswrapper[4838]: I0202 11:19:59.712442 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/nova-scheduler-0" Feb 02 11:19:59 crc kubenswrapper[4838]: I0202 11:19:59.802307 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Feb 02 11:20:00 crc kubenswrapper[4838]: I0202 11:20:00.047875 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Feb 02 11:20:00 crc kubenswrapper[4838]: I0202 11:20:00.420189 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 02 11:20:00 crc kubenswrapper[4838]: I0202 11:20:00.420246 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 02 11:20:01 crc kubenswrapper[4838]: I0202 11:20:01.124160 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Feb 02 11:20:01 crc kubenswrapper[4838]: I0202 11:20:01.503192 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="098dda3e-e72b-47f4-a2b4-b213b2710990" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.214:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 02 11:20:01 crc kubenswrapper[4838]: I0202 11:20:01.503405 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="098dda3e-e72b-47f4-a2b4-b213b2710990" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.214:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 02 11:20:01 crc kubenswrapper[4838]: I0202 11:20:01.914844 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 02 11:20:01 crc kubenswrapper[4838]: I0202 11:20:01.914900 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 02 11:20:01 crc kubenswrapper[4838]: I0202 11:20:01.921143 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-55db57f78d-mdj8d" Feb 02 11:20:02 crc kubenswrapper[4838]: I0202 11:20:02.928761 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="cd9b0ea2-d076-43f8-87f9-6491b526d025" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.216:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 02 11:20:02 crc kubenswrapper[4838]: I0202 11:20:02.928845 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="cd9b0ea2-d076-43f8-87f9-6491b526d025" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.216:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 02 11:20:04 crc kubenswrapper[4838]: I0202 11:20:04.051420 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-595df946b7-5b7qm" Feb 02 11:20:04 crc kubenswrapper[4838]: I0202 11:20:04.155216 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-55db57f78d-mdj8d"] Feb 02 11:20:04 crc kubenswrapper[4838]: I0202 11:20:04.155477 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-55db57f78d-mdj8d" podUID="7d6708b7-91c5-4090-8e1f-60061ca37055" containerName="neutron-api" 
containerID="cri-o://82660f60e665882b502595c6b18c8dfda06ee7965108b4b37daf07039c16abcd" gracePeriod=30 Feb 02 11:20:04 crc kubenswrapper[4838]: I0202 11:20:04.155656 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-55db57f78d-mdj8d" podUID="7d6708b7-91c5-4090-8e1f-60061ca37055" containerName="neutron-httpd" containerID="cri-o://4526e7d2d33a8a9946a0b12ad48b9cb8e7dc9dd25c6ee4956284f9466b4bfd04" gracePeriod=30 Feb 02 11:20:04 crc kubenswrapper[4838]: I0202 11:20:04.739079 4838 generic.go:334] "Generic (PLEG): container finished" podID="7d6708b7-91c5-4090-8e1f-60061ca37055" containerID="4526e7d2d33a8a9946a0b12ad48b9cb8e7dc9dd25c6ee4956284f9466b4bfd04" exitCode=0 Feb 02 11:20:04 crc kubenswrapper[4838]: I0202 11:20:04.739456 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55db57f78d-mdj8d" event={"ID":"7d6708b7-91c5-4090-8e1f-60061ca37055","Type":"ContainerDied","Data":"4526e7d2d33a8a9946a0b12ad48b9cb8e7dc9dd25c6ee4956284f9466b4bfd04"} Feb 02 11:20:05 crc kubenswrapper[4838]: I0202 11:20:05.048483 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Feb 02 11:20:05 crc kubenswrapper[4838]: I0202 11:20:05.066980 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Feb 02 11:20:05 crc kubenswrapper[4838]: I0202 11:20:05.764954 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Feb 02 11:20:05 crc kubenswrapper[4838]: I0202 11:20:05.938077 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-9ln9l"] Feb 02 11:20:05 crc kubenswrapper[4838]: I0202 11:20:05.939675 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-9ln9l" Feb 02 11:20:05 crc kubenswrapper[4838]: I0202 11:20:05.944146 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Feb 02 11:20:05 crc kubenswrapper[4838]: I0202 11:20:05.944146 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Feb 02 11:20:05 crc kubenswrapper[4838]: I0202 11:20:05.948326 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-9ln9l"] Feb 02 11:20:06 crc kubenswrapper[4838]: I0202 11:20:06.027890 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6ecc1cc-2175-4372-ae32-61761f66a342-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-9ln9l\" (UID: \"b6ecc1cc-2175-4372-ae32-61761f66a342\") " pod="openstack/nova-cell1-cell-mapping-9ln9l" Feb 02 11:20:06 crc kubenswrapper[4838]: I0202 11:20:06.027952 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6ecc1cc-2175-4372-ae32-61761f66a342-scripts\") pod \"nova-cell1-cell-mapping-9ln9l\" (UID: \"b6ecc1cc-2175-4372-ae32-61761f66a342\") " pod="openstack/nova-cell1-cell-mapping-9ln9l" Feb 02 11:20:06 crc kubenswrapper[4838]: I0202 11:20:06.028030 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6ecc1cc-2175-4372-ae32-61761f66a342-config-data\") pod \"nova-cell1-cell-mapping-9ln9l\" (UID: \"b6ecc1cc-2175-4372-ae32-61761f66a342\") " pod="openstack/nova-cell1-cell-mapping-9ln9l" Feb 02 11:20:06 crc kubenswrapper[4838]: I0202 11:20:06.028069 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pmq2\" (UniqueName: \"kubernetes.io/projected/b6ecc1cc-2175-4372-ae32-61761f66a342-kube-api-access-9pmq2\") pod \"nova-cell1-cell-mapping-9ln9l\" (UID: \"b6ecc1cc-2175-4372-ae32-61761f66a342\") " pod="openstack/nova-cell1-cell-mapping-9ln9l" Feb 02 11:20:06 crc kubenswrapper[4838]: I0202 11:20:06.131024 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6ecc1cc-2175-4372-ae32-61761f66a342-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-9ln9l\" (UID: \"b6ecc1cc-2175-4372-ae32-61761f66a342\") " pod="openstack/nova-cell1-cell-mapping-9ln9l" Feb 02 11:20:06 crc kubenswrapper[4838]: I0202 11:20:06.131083 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6ecc1cc-2175-4372-ae32-61761f66a342-scripts\") pod \"nova-cell1-cell-mapping-9ln9l\" (UID: \"b6ecc1cc-2175-4372-ae32-61761f66a342\") " pod="openstack/nova-cell1-cell-mapping-9ln9l" Feb 02 11:20:06 crc kubenswrapper[4838]: I0202 11:20:06.131137 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6ecc1cc-2175-4372-ae32-61761f66a342-config-data\") pod \"nova-cell1-cell-mapping-9ln9l\" (UID: \"b6ecc1cc-2175-4372-ae32-61761f66a342\") " pod="openstack/nova-cell1-cell-mapping-9ln9l" Feb 02 11:20:06 crc kubenswrapper[4838]: I0202 11:20:06.131167 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pmq2\" (UniqueName: 
\"kubernetes.io/projected/b6ecc1cc-2175-4372-ae32-61761f66a342-kube-api-access-9pmq2\") pod \"nova-cell1-cell-mapping-9ln9l\" (UID: \"b6ecc1cc-2175-4372-ae32-61761f66a342\") " pod="openstack/nova-cell1-cell-mapping-9ln9l" Feb 02 11:20:06 crc kubenswrapper[4838]: I0202 11:20:06.138130 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6ecc1cc-2175-4372-ae32-61761f66a342-scripts\") pod \"nova-cell1-cell-mapping-9ln9l\" (UID: \"b6ecc1cc-2175-4372-ae32-61761f66a342\") " pod="openstack/nova-cell1-cell-mapping-9ln9l" Feb 02 11:20:06 crc kubenswrapper[4838]: I0202 11:20:06.138822 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6ecc1cc-2175-4372-ae32-61761f66a342-config-data\") pod \"nova-cell1-cell-mapping-9ln9l\" (UID: \"b6ecc1cc-2175-4372-ae32-61761f66a342\") " pod="openstack/nova-cell1-cell-mapping-9ln9l" Feb 02 11:20:06 crc kubenswrapper[4838]: I0202 11:20:06.141427 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6ecc1cc-2175-4372-ae32-61761f66a342-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-9ln9l\" (UID: \"b6ecc1cc-2175-4372-ae32-61761f66a342\") " pod="openstack/nova-cell1-cell-mapping-9ln9l" Feb 02 11:20:06 crc kubenswrapper[4838]: I0202 11:20:06.180294 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pmq2\" (UniqueName: \"kubernetes.io/projected/b6ecc1cc-2175-4372-ae32-61761f66a342-kube-api-access-9pmq2\") pod \"nova-cell1-cell-mapping-9ln9l\" (UID: \"b6ecc1cc-2175-4372-ae32-61761f66a342\") " pod="openstack/nova-cell1-cell-mapping-9ln9l" Feb 02 11:20:06 crc kubenswrapper[4838]: I0202 11:20:06.271848 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-9ln9l" Feb 02 11:20:06 crc kubenswrapper[4838]: I0202 11:20:06.798709 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-9ln9l"] Feb 02 11:20:06 crc kubenswrapper[4838]: W0202 11:20:06.805760 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb6ecc1cc_2175_4372_ae32_61761f66a342.slice/crio-08bf5299bded889c56457c36a306c9f5f722451a9449edc497097cefbee7c0d9 WatchSource:0}: Error finding container 08bf5299bded889c56457c36a306c9f5f722451a9449edc497097cefbee7c0d9: Status 404 returned error can't find the container with id 08bf5299bded889c56457c36a306c9f5f722451a9449edc497097cefbee7c0d9 Feb 02 11:20:07 crc kubenswrapper[4838]: I0202 11:20:07.770271 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-9ln9l" event={"ID":"b6ecc1cc-2175-4372-ae32-61761f66a342","Type":"ContainerStarted","Data":"8426a6e4d214510a52f278be0c850961a4eb3e86434e21bbda26e1cacba5f3a2"} Feb 02 11:20:07 crc kubenswrapper[4838]: I0202 11:20:07.771010 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-9ln9l" event={"ID":"b6ecc1cc-2175-4372-ae32-61761f66a342","Type":"ContainerStarted","Data":"08bf5299bded889c56457c36a306c9f5f722451a9449edc497097cefbee7c0d9"} Feb 02 11:20:07 crc kubenswrapper[4838]: I0202 11:20:07.805424 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-9ln9l" podStartSLOduration=2.805394796 podStartE2EDuration="2.805394796s" podCreationTimestamp="2026-02-02 11:20:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:20:07.793075798 +0000 UTC m=+1602.130176836" watchObservedRunningTime="2026-02-02 11:20:07.805394796 +0000 UTC m=+1602.142495824" Feb 02 11:20:08 crc kubenswrapper[4838]: I0202 11:20:08.403746 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-h5g6n"] Feb 02 11:20:08 crc kubenswrapper[4838]: I0202 11:20:08.406167 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h5g6n" Feb 02 11:20:08 crc kubenswrapper[4838]: I0202 11:20:08.415028 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5g6n"] Feb 02 11:20:08 crc kubenswrapper[4838]: I0202 11:20:08.479325 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkc8m\" (UniqueName: \"kubernetes.io/projected/6cdce19a-2872-4dea-94d1-6497f104c890-kube-api-access-jkc8m\") pod \"redhat-marketplace-h5g6n\" (UID: \"6cdce19a-2872-4dea-94d1-6497f104c890\") " pod="openshift-marketplace/redhat-marketplace-h5g6n" Feb 02 11:20:08 crc kubenswrapper[4838]: I0202 11:20:08.479382 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cdce19a-2872-4dea-94d1-6497f104c890-catalog-content\") pod \"redhat-marketplace-h5g6n\" (UID: \"6cdce19a-2872-4dea-94d1-6497f104c890\") " pod="openshift-marketplace/redhat-marketplace-h5g6n" Feb 02 11:20:08 crc kubenswrapper[4838]: I0202 11:20:08.479448 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cdce19a-2872-4dea-94d1-6497f104c890-utilities\") pod \"redhat-marketplace-h5g6n\" (UID: \"6cdce19a-2872-4dea-94d1-6497f104c890\") " pod="openshift-marketplace/redhat-marketplace-h5g6n" Feb 02 11:20:08 crc kubenswrapper[4838]: I0202 11:20:08.580890 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cdce19a-2872-4dea-94d1-6497f104c890-utilities\") pod \"redhat-marketplace-h5g6n\" (UID: \"6cdce19a-2872-4dea-94d1-6497f104c890\") " pod="openshift-marketplace/redhat-marketplace-h5g6n" Feb 02 11:20:08 crc kubenswrapper[4838]: I0202 11:20:08.582951 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkc8m\" (UniqueName: \"kubernetes.io/projected/6cdce19a-2872-4dea-94d1-6497f104c890-kube-api-access-jkc8m\") pod \"redhat-marketplace-h5g6n\" (UID: \"6cdce19a-2872-4dea-94d1-6497f104c890\") " pod="openshift-marketplace/redhat-marketplace-h5g6n" Feb 02 11:20:08 crc kubenswrapper[4838]: I0202 11:20:08.583041 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cdce19a-2872-4dea-94d1-6497f104c890-catalog-content\") pod \"redhat-marketplace-h5g6n\" (UID: \"6cdce19a-2872-4dea-94d1-6497f104c890\") " pod="openshift-marketplace/redhat-marketplace-h5g6n" Feb 02 11:20:08 crc kubenswrapper[4838]: I0202 11:20:08.591907 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cdce19a-2872-4dea-94d1-6497f104c890-utilities\") pod \"redhat-marketplace-h5g6n\" (UID: \"6cdce19a-2872-4dea-94d1-6497f104c890\") " pod="openshift-marketplace/redhat-marketplace-h5g6n" Feb 02 11:20:08 crc kubenswrapper[4838]: I0202 11:20:08.592200 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cdce19a-2872-4dea-94d1-6497f104c890-catalog-content\") pod \"redhat-marketplace-h5g6n\" (UID: \"6cdce19a-2872-4dea-94d1-6497f104c890\") " pod="openshift-marketplace/redhat-marketplace-h5g6n" Feb 02 11:20:08 crc kubenswrapper[4838]: I0202 11:20:08.613605 4838 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-jkc8m\" (UniqueName: \"kubernetes.io/projected/6cdce19a-2872-4dea-94d1-6497f104c890-kube-api-access-jkc8m\") pod \"redhat-marketplace-h5g6n\" (UID: \"6cdce19a-2872-4dea-94d1-6497f104c890\") " pod="openshift-marketplace/redhat-marketplace-h5g6n" Feb 02 11:20:08 crc kubenswrapper[4838]: I0202 11:20:08.728583 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h5g6n" Feb 02 11:20:09 crc kubenswrapper[4838]: I0202 11:20:09.234264 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5g6n"] Feb 02 11:20:09 crc kubenswrapper[4838]: I0202 11:20:09.789296 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5g6n" event={"ID":"6cdce19a-2872-4dea-94d1-6497f104c890","Type":"ContainerStarted","Data":"c376051b3171964e92c98859670a1f2524b04e107f2f3c2a4bffae33a24c5a92"} Feb 02 11:20:10 crc kubenswrapper[4838]: I0202 11:20:10.424681 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 02 11:20:10 crc kubenswrapper[4838]: I0202 11:20:10.425817 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 02 11:20:10 crc kubenswrapper[4838]: I0202 11:20:10.426039 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 02 11:20:10 crc kubenswrapper[4838]: I0202 11:20:10.436675 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 02 11:20:10 crc kubenswrapper[4838]: I0202 11:20:10.825653 4838 generic.go:334] "Generic (PLEG): container finished" podID="6cdce19a-2872-4dea-94d1-6497f104c890" containerID="bce426557a516289d643aee246d7e09b68780da1b5f0758cea76f4b93b9dbe71" exitCode=0 Feb 02 11:20:10 crc kubenswrapper[4838]: I0202 11:20:10.825854 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5g6n" event={"ID":"6cdce19a-2872-4dea-94d1-6497f104c890","Type":"ContainerDied","Data":"bce426557a516289d643aee246d7e09b68780da1b5f0758cea76f4b93b9dbe71"} Feb 02 11:20:10 crc kubenswrapper[4838]: I0202 11:20:10.826465 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 02 11:20:10 crc kubenswrapper[4838]: I0202 11:20:10.844612 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.050081 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-bjxmv"] Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.051787 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-bjxmv" Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.080323 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-bjxmv"] Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.149170 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2a16a8c7-7667-401d-93aa-d0209c7c6ea7-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-bjxmv\" (UID: \"2a16a8c7-7667-401d-93aa-d0209c7c6ea7\") " pod="openstack/dnsmasq-dns-89c5cd4d5-bjxmv" Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.149245 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2a16a8c7-7667-401d-93aa-d0209c7c6ea7-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-bjxmv\" (UID: \"2a16a8c7-7667-401d-93aa-d0209c7c6ea7\") " pod="openstack/dnsmasq-dns-89c5cd4d5-bjxmv" Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.149280 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmw79\" (UniqueName: \"kubernetes.io/projected/2a16a8c7-7667-401d-93aa-d0209c7c6ea7-kube-api-access-pmw79\") pod \"dnsmasq-dns-89c5cd4d5-bjxmv\" (UID: \"2a16a8c7-7667-401d-93aa-d0209c7c6ea7\") " pod="openstack/dnsmasq-dns-89c5cd4d5-bjxmv" Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.149412 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a16a8c7-7667-401d-93aa-d0209c7c6ea7-config\") pod \"dnsmasq-dns-89c5cd4d5-bjxmv\" (UID: \"2a16a8c7-7667-401d-93aa-d0209c7c6ea7\") " pod="openstack/dnsmasq-dns-89c5cd4d5-bjxmv" Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.149455 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2a16a8c7-7667-401d-93aa-d0209c7c6ea7-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-bjxmv\" (UID: \"2a16a8c7-7667-401d-93aa-d0209c7c6ea7\") " pod="openstack/dnsmasq-dns-89c5cd4d5-bjxmv" Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.149485 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2a16a8c7-7667-401d-93aa-d0209c7c6ea7-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-bjxmv\" (UID: \"2a16a8c7-7667-401d-93aa-d0209c7c6ea7\") " pod="openstack/dnsmasq-dns-89c5cd4d5-bjxmv" Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.252048 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2a16a8c7-7667-401d-93aa-d0209c7c6ea7-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-bjxmv\" (UID: \"2a16a8c7-7667-401d-93aa-d0209c7c6ea7\") " pod="openstack/dnsmasq-dns-89c5cd4d5-bjxmv" Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.252148 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2a16a8c7-7667-401d-93aa-d0209c7c6ea7-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-bjxmv\" (UID: \"2a16a8c7-7667-401d-93aa-d0209c7c6ea7\") " pod="openstack/dnsmasq-dns-89c5cd4d5-bjxmv" Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.252283 4838 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-pmw79\" (UniqueName: \"kubernetes.io/projected/2a16a8c7-7667-401d-93aa-d0209c7c6ea7-kube-api-access-pmw79\") pod \"dnsmasq-dns-89c5cd4d5-bjxmv\" (UID: \"2a16a8c7-7667-401d-93aa-d0209c7c6ea7\") " pod="openstack/dnsmasq-dns-89c5cd4d5-bjxmv" Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.252427 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a16a8c7-7667-401d-93aa-d0209c7c6ea7-config\") pod \"dnsmasq-dns-89c5cd4d5-bjxmv\" (UID: \"2a16a8c7-7667-401d-93aa-d0209c7c6ea7\") " pod="openstack/dnsmasq-dns-89c5cd4d5-bjxmv" Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.252491 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2a16a8c7-7667-401d-93aa-d0209c7c6ea7-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-bjxmv\" (UID: \"2a16a8c7-7667-401d-93aa-d0209c7c6ea7\") " pod="openstack/dnsmasq-dns-89c5cd4d5-bjxmv" Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.252525 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2a16a8c7-7667-401d-93aa-d0209c7c6ea7-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-bjxmv\" (UID: \"2a16a8c7-7667-401d-93aa-d0209c7c6ea7\") " pod="openstack/dnsmasq-dns-89c5cd4d5-bjxmv" Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.253415 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2a16a8c7-7667-401d-93aa-d0209c7c6ea7-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-bjxmv\" (UID: \"2a16a8c7-7667-401d-93aa-d0209c7c6ea7\") " pod="openstack/dnsmasq-dns-89c5cd4d5-bjxmv" Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.253746 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a16a8c7-7667-401d-93aa-d0209c7c6ea7-config\") pod \"dnsmasq-dns-89c5cd4d5-bjxmv\" (UID: \"2a16a8c7-7667-401d-93aa-d0209c7c6ea7\") " pod="openstack/dnsmasq-dns-89c5cd4d5-bjxmv" Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.254291 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2a16a8c7-7667-401d-93aa-d0209c7c6ea7-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-bjxmv\" (UID: \"2a16a8c7-7667-401d-93aa-d0209c7c6ea7\") " pod="openstack/dnsmasq-dns-89c5cd4d5-bjxmv" Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.254573 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2a16a8c7-7667-401d-93aa-d0209c7c6ea7-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-bjxmv\" (UID: \"2a16a8c7-7667-401d-93aa-d0209c7c6ea7\") " pod="openstack/dnsmasq-dns-89c5cd4d5-bjxmv" Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.254712 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2a16a8c7-7667-401d-93aa-d0209c7c6ea7-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-bjxmv\" (UID: \"2a16a8c7-7667-401d-93aa-d0209c7c6ea7\") " pod="openstack/dnsmasq-dns-89c5cd4d5-bjxmv" Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.271883 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmw79\" (UniqueName: 
\"kubernetes.io/projected/2a16a8c7-7667-401d-93aa-d0209c7c6ea7-kube-api-access-pmw79\") pod \"dnsmasq-dns-89c5cd4d5-bjxmv\" (UID: \"2a16a8c7-7667-401d-93aa-d0209c7c6ea7\") " pod="openstack/dnsmasq-dns-89c5cd4d5-bjxmv" Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.387533 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-bjxmv" Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.853242 4838 generic.go:334] "Generic (PLEG): container finished" podID="7d6708b7-91c5-4090-8e1f-60061ca37055" containerID="82660f60e665882b502595c6b18c8dfda06ee7965108b4b37daf07039c16abcd" exitCode=0 Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.853423 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55db57f78d-mdj8d" event={"ID":"7d6708b7-91c5-4090-8e1f-60061ca37055","Type":"ContainerDied","Data":"82660f60e665882b502595c6b18c8dfda06ee7965108b4b37daf07039c16abcd"} Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.875248 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-bjxmv"] Feb 02 11:20:11 crc kubenswrapper[4838]: W0202 11:20:11.888097 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2a16a8c7_7667_401d_93aa_d0209c7c6ea7.slice/crio-882125ed8ed008f621b6128603a9d30b6824a206179a21e8d4d8458564e72173 WatchSource:0}: Error finding container 882125ed8ed008f621b6128603a9d30b6824a206179a21e8d4d8458564e72173: Status 404 returned error can't find the container with id 882125ed8ed008f621b6128603a9d30b6824a206179a21e8d4d8458564e72173 Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.938144 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.941409 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 02 11:20:11 crc kubenswrapper[4838]: I0202 11:20:11.968242 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 02 11:20:12 crc kubenswrapper[4838]: I0202 11:20:12.380321 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-55db57f78d-mdj8d" Feb 02 11:20:12 crc kubenswrapper[4838]: I0202 11:20:12.488164 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/7d6708b7-91c5-4090-8e1f-60061ca37055-config\") pod \"7d6708b7-91c5-4090-8e1f-60061ca37055\" (UID: \"7d6708b7-91c5-4090-8e1f-60061ca37055\") " Feb 02 11:20:12 crc kubenswrapper[4838]: I0202 11:20:12.488248 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqrsk\" (UniqueName: \"kubernetes.io/projected/7d6708b7-91c5-4090-8e1f-60061ca37055-kube-api-access-fqrsk\") pod \"7d6708b7-91c5-4090-8e1f-60061ca37055\" (UID: \"7d6708b7-91c5-4090-8e1f-60061ca37055\") " Feb 02 11:20:12 crc kubenswrapper[4838]: I0202 11:20:12.488284 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/7d6708b7-91c5-4090-8e1f-60061ca37055-httpd-config\") pod \"7d6708b7-91c5-4090-8e1f-60061ca37055\" (UID: \"7d6708b7-91c5-4090-8e1f-60061ca37055\") " Feb 02 11:20:12 crc kubenswrapper[4838]: I0202 11:20:12.488345 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d6708b7-91c5-4090-8e1f-60061ca37055-combined-ca-bundle\") pod \"7d6708b7-91c5-4090-8e1f-60061ca37055\" (UID: \"7d6708b7-91c5-4090-8e1f-60061ca37055\") " Feb 02 11:20:12 crc kubenswrapper[4838]: I0202 11:20:12.488438 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d6708b7-91c5-4090-8e1f-60061ca37055-ovndb-tls-certs\") pod \"7d6708b7-91c5-4090-8e1f-60061ca37055\" (UID: \"7d6708b7-91c5-4090-8e1f-60061ca37055\") " Feb 02 11:20:12 crc kubenswrapper[4838]: I0202 11:20:12.492700 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d6708b7-91c5-4090-8e1f-60061ca37055-kube-api-access-fqrsk" (OuterVolumeSpecName: "kube-api-access-fqrsk") pod "7d6708b7-91c5-4090-8e1f-60061ca37055" (UID: "7d6708b7-91c5-4090-8e1f-60061ca37055"). InnerVolumeSpecName "kube-api-access-fqrsk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:20:12 crc kubenswrapper[4838]: I0202 11:20:12.499958 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d6708b7-91c5-4090-8e1f-60061ca37055-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "7d6708b7-91c5-4090-8e1f-60061ca37055" (UID: "7d6708b7-91c5-4090-8e1f-60061ca37055"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:20:12 crc kubenswrapper[4838]: I0202 11:20:12.553023 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d6708b7-91c5-4090-8e1f-60061ca37055-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7d6708b7-91c5-4090-8e1f-60061ca37055" (UID: "7d6708b7-91c5-4090-8e1f-60061ca37055"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:20:12 crc kubenswrapper[4838]: I0202 11:20:12.576832 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d6708b7-91c5-4090-8e1f-60061ca37055-config" (OuterVolumeSpecName: "config") pod "7d6708b7-91c5-4090-8e1f-60061ca37055" (UID: "7d6708b7-91c5-4090-8e1f-60061ca37055"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:20:12 crc kubenswrapper[4838]: I0202 11:20:12.587941 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d6708b7-91c5-4090-8e1f-60061ca37055-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "7d6708b7-91c5-4090-8e1f-60061ca37055" (UID: "7d6708b7-91c5-4090-8e1f-60061ca37055"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:20:12 crc kubenswrapper[4838]: I0202 11:20:12.590273 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqrsk\" (UniqueName: \"kubernetes.io/projected/7d6708b7-91c5-4090-8e1f-60061ca37055-kube-api-access-fqrsk\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:12 crc kubenswrapper[4838]: I0202 11:20:12.590303 4838 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/7d6708b7-91c5-4090-8e1f-60061ca37055-httpd-config\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:12 crc kubenswrapper[4838]: I0202 11:20:12.590314 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d6708b7-91c5-4090-8e1f-60061ca37055-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:12 crc kubenswrapper[4838]: I0202 11:20:12.590324 4838 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d6708b7-91c5-4090-8e1f-60061ca37055-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:12 crc kubenswrapper[4838]: I0202 11:20:12.590332 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/7d6708b7-91c5-4090-8e1f-60061ca37055-config\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:12 crc kubenswrapper[4838]: I0202 11:20:12.865157 4838 generic.go:334] "Generic (PLEG): container finished" podID="2a16a8c7-7667-401d-93aa-d0209c7c6ea7" containerID="27c4eb84105ad0ec47e326dc96f70bcd5ac9bb0c0067b3c1c7286d72042da80d" exitCode=0 Feb 02 11:20:12 crc kubenswrapper[4838]: I0202 11:20:12.865266 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-bjxmv" event={"ID":"2a16a8c7-7667-401d-93aa-d0209c7c6ea7","Type":"ContainerDied","Data":"27c4eb84105ad0ec47e326dc96f70bcd5ac9bb0c0067b3c1c7286d72042da80d"} Feb 02 11:20:12 crc kubenswrapper[4838]: I0202 11:20:12.865540 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-bjxmv" event={"ID":"2a16a8c7-7667-401d-93aa-d0209c7c6ea7","Type":"ContainerStarted","Data":"882125ed8ed008f621b6128603a9d30b6824a206179a21e8d4d8458564e72173"} Feb 02 11:20:12 crc kubenswrapper[4838]: I0202 11:20:12.874823 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-55db57f78d-mdj8d" Feb 02 11:20:12 crc kubenswrapper[4838]: I0202 11:20:12.874869 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55db57f78d-mdj8d" event={"ID":"7d6708b7-91c5-4090-8e1f-60061ca37055","Type":"ContainerDied","Data":"604229b14684efec320d8467e0b7f3713182d277fc2c21b3e580ad79ee78fa09"} Feb 02 11:20:12 crc kubenswrapper[4838]: I0202 11:20:12.874916 4838 scope.go:117] "RemoveContainer" containerID="4526e7d2d33a8a9946a0b12ad48b9cb8e7dc9dd25c6ee4956284f9466b4bfd04" Feb 02 11:20:12 crc kubenswrapper[4838]: I0202 11:20:12.891638 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 02 11:20:12 crc kubenswrapper[4838]: I0202 11:20:12.949898 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-55db57f78d-mdj8d"] Feb 02 11:20:12 crc kubenswrapper[4838]: I0202 11:20:12.954753 4838 scope.go:117] "RemoveContainer" containerID="82660f60e665882b502595c6b18c8dfda06ee7965108b4b37daf07039c16abcd" Feb 02 11:20:12 crc kubenswrapper[4838]: I0202 11:20:12.961890 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-55db57f78d-mdj8d"] Feb 02 11:20:13 crc kubenswrapper[4838]: I0202 11:20:13.740177 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 02 11:20:13 crc kubenswrapper[4838]: I0202 11:20:13.886140 4838 generic.go:334] "Generic (PLEG): container finished" podID="b6ecc1cc-2175-4372-ae32-61761f66a342" containerID="8426a6e4d214510a52f278be0c850961a4eb3e86434e21bbda26e1cacba5f3a2" exitCode=0 Feb 02 11:20:13 crc kubenswrapper[4838]: I0202 11:20:13.886227 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-9ln9l" event={"ID":"b6ecc1cc-2175-4372-ae32-61761f66a342","Type":"ContainerDied","Data":"8426a6e4d214510a52f278be0c850961a4eb3e86434e21bbda26e1cacba5f3a2"} Feb 02 11:20:13 crc kubenswrapper[4838]: I0202 11:20:13.888456 4838 generic.go:334] "Generic (PLEG): container finished" podID="6cdce19a-2872-4dea-94d1-6497f104c890" containerID="8f20d1af6b82b75cb1f8f1d66396d964d1719d147821610773ef9f61124023ac" exitCode=0 Feb 02 11:20:13 crc kubenswrapper[4838]: I0202 11:20:13.888544 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5g6n" event={"ID":"6cdce19a-2872-4dea-94d1-6497f104c890","Type":"ContainerDied","Data":"8f20d1af6b82b75cb1f8f1d66396d964d1719d147821610773ef9f61124023ac"} Feb 02 11:20:13 crc kubenswrapper[4838]: I0202 11:20:13.890743 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-bjxmv" event={"ID":"2a16a8c7-7667-401d-93aa-d0209c7c6ea7","Type":"ContainerStarted","Data":"186f2bdb1554d354da85d801f082f248994c2d8ea438c81574d1c6d1102caa79"} Feb 02 11:20:13 crc kubenswrapper[4838]: I0202 11:20:13.891061 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="098dda3e-e72b-47f4-a2b4-b213b2710990" containerName="nova-api-log" containerID="cri-o://3fa6906531577063158d4d8b8af272bd626a8537209c53a7be293759eaa7e132" gracePeriod=30 Feb 02 11:20:13 crc kubenswrapper[4838]: I0202 11:20:13.891115 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="098dda3e-e72b-47f4-a2b4-b213b2710990" containerName="nova-api-api" containerID="cri-o://428e64d6af02a52beee4311b8cf4444fdd2c078430cc23a25c8d145a929b03a2" gracePeriod=30 Feb 02 11:20:13 crc kubenswrapper[4838]: 
I0202 11:20:13.965229 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-89c5cd4d5-bjxmv" podStartSLOduration=2.965211338 podStartE2EDuration="2.965211338s" podCreationTimestamp="2026-02-02 11:20:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:20:13.956370663 +0000 UTC m=+1608.293471701" watchObservedRunningTime="2026-02-02 11:20:13.965211338 +0000 UTC m=+1608.302312356" Feb 02 11:20:14 crc kubenswrapper[4838]: I0202 11:20:14.455882 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:20:14 crc kubenswrapper[4838]: I0202 11:20:14.457001 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" containerName="ceilometer-central-agent" containerID="cri-o://6273287249d78e66e69e3090ecb23467d3a6564d67bca5a537a33286cb95af24" gracePeriod=30 Feb 02 11:20:14 crc kubenswrapper[4838]: I0202 11:20:14.457254 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" containerName="proxy-httpd" containerID="cri-o://f9e793113d17cc6cf2ed43d46c0e13f1ccfd3131091aefaa2512f6235f884bb4" gracePeriod=30 Feb 02 11:20:14 crc kubenswrapper[4838]: I0202 11:20:14.457264 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" containerName="sg-core" containerID="cri-o://2cd555b5cefee893aed1fed93bd0885d7e6eb91778f7d5f88561925443d3b326" gracePeriod=30 Feb 02 11:20:14 crc kubenswrapper[4838]: I0202 11:20:14.457383 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" containerName="ceilometer-notification-agent" containerID="cri-o://9dce7b9504df09cd7e5d4028c315aaf338b1b59e11bcacc59433b30faf9f4adf" gracePeriod=30 Feb 02 11:20:14 crc kubenswrapper[4838]: I0202 11:20:14.528836 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d6708b7-91c5-4090-8e1f-60061ca37055" path="/var/lib/kubelet/pods/7d6708b7-91c5-4090-8e1f-60061ca37055/volumes" Feb 02 11:20:14 crc kubenswrapper[4838]: I0202 11:20:14.566682 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.212:3000/\": read tcp 10.217.0.2:38536->10.217.0.212:3000: read: connection reset by peer" Feb 02 11:20:14 crc kubenswrapper[4838]: I0202 11:20:14.902425 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5g6n" event={"ID":"6cdce19a-2872-4dea-94d1-6497f104c890","Type":"ContainerStarted","Data":"634a64b28efa5eef64a92fa766c9c061426043b7409474f068724d0a5b70caf3"} Feb 02 11:20:14 crc kubenswrapper[4838]: I0202 11:20:14.905950 4838 generic.go:334] "Generic (PLEG): container finished" podID="098dda3e-e72b-47f4-a2b4-b213b2710990" containerID="3fa6906531577063158d4d8b8af272bd626a8537209c53a7be293759eaa7e132" exitCode=143 Feb 02 11:20:14 crc kubenswrapper[4838]: I0202 11:20:14.906022 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"098dda3e-e72b-47f4-a2b4-b213b2710990","Type":"ContainerDied","Data":"3fa6906531577063158d4d8b8af272bd626a8537209c53a7be293759eaa7e132"} Feb 02 11:20:14 crc kubenswrapper[4838]: I0202 11:20:14.909043 4838 generic.go:334] "Generic (PLEG): container finished" podID="5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" containerID="f9e793113d17cc6cf2ed43d46c0e13f1ccfd3131091aefaa2512f6235f884bb4" exitCode=0 Feb 02 11:20:14 crc kubenswrapper[4838]: I0202 11:20:14.909066 4838 generic.go:334] "Generic (PLEG): container finished" podID="5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" containerID="2cd555b5cefee893aed1fed93bd0885d7e6eb91778f7d5f88561925443d3b326" exitCode=2 Feb 02 11:20:14 crc kubenswrapper[4838]: I0202 11:20:14.909073 4838 generic.go:334] "Generic (PLEG): container finished" podID="5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" containerID="6273287249d78e66e69e3090ecb23467d3a6564d67bca5a537a33286cb95af24" exitCode=0 Feb 02 11:20:14 crc kubenswrapper[4838]: I0202 11:20:14.909166 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844","Type":"ContainerDied","Data":"f9e793113d17cc6cf2ed43d46c0e13f1ccfd3131091aefaa2512f6235f884bb4"} Feb 02 11:20:14 crc kubenswrapper[4838]: I0202 11:20:14.909189 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844","Type":"ContainerDied","Data":"2cd555b5cefee893aed1fed93bd0885d7e6eb91778f7d5f88561925443d3b326"} Feb 02 11:20:14 crc kubenswrapper[4838]: I0202 11:20:14.909200 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844","Type":"ContainerDied","Data":"6273287249d78e66e69e3090ecb23467d3a6564d67bca5a537a33286cb95af24"} Feb 02 11:20:14 crc kubenswrapper[4838]: I0202 11:20:14.909451 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-89c5cd4d5-bjxmv" Feb 02 11:20:14 crc kubenswrapper[4838]: I0202 11:20:14.938593 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-h5g6n" podStartSLOduration=3.415031484 podStartE2EDuration="6.93856961s" podCreationTimestamp="2026-02-02 11:20:08 +0000 UTC" firstStartedPulling="2026-02-02 11:20:10.828114774 +0000 UTC m=+1605.165215792" lastFinishedPulling="2026-02-02 11:20:14.35165289 +0000 UTC m=+1608.688753918" observedRunningTime="2026-02-02 11:20:14.92431566 +0000 UTC m=+1609.261416698" watchObservedRunningTime="2026-02-02 11:20:14.93856961 +0000 UTC m=+1609.275670638" Feb 02 11:20:15 crc kubenswrapper[4838]: I0202 11:20:15.419174 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-9ln9l" Feb 02 11:20:15 crc kubenswrapper[4838]: I0202 11:20:15.429771 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 11:20:15 crc kubenswrapper[4838]: I0202 11:20:15.429822 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 11:20:15 crc kubenswrapper[4838]: I0202 11:20:15.456698 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6ecc1cc-2175-4372-ae32-61761f66a342-combined-ca-bundle\") pod \"b6ecc1cc-2175-4372-ae32-61761f66a342\" (UID: \"b6ecc1cc-2175-4372-ae32-61761f66a342\") " Feb 02 11:20:15 crc kubenswrapper[4838]: I0202 11:20:15.456812 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9pmq2\" (UniqueName: \"kubernetes.io/projected/b6ecc1cc-2175-4372-ae32-61761f66a342-kube-api-access-9pmq2\") pod \"b6ecc1cc-2175-4372-ae32-61761f66a342\" (UID: \"b6ecc1cc-2175-4372-ae32-61761f66a342\") " Feb 02 11:20:15 crc kubenswrapper[4838]: I0202 11:20:15.456891 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6ecc1cc-2175-4372-ae32-61761f66a342-scripts\") pod \"b6ecc1cc-2175-4372-ae32-61761f66a342\" (UID: \"b6ecc1cc-2175-4372-ae32-61761f66a342\") " Feb 02 11:20:15 crc kubenswrapper[4838]: I0202 11:20:15.456924 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6ecc1cc-2175-4372-ae32-61761f66a342-config-data\") pod \"b6ecc1cc-2175-4372-ae32-61761f66a342\" (UID: \"b6ecc1cc-2175-4372-ae32-61761f66a342\") " Feb 02 11:20:15 crc kubenswrapper[4838]: I0202 11:20:15.464443 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6ecc1cc-2175-4372-ae32-61761f66a342-kube-api-access-9pmq2" (OuterVolumeSpecName: "kube-api-access-9pmq2") pod "b6ecc1cc-2175-4372-ae32-61761f66a342" (UID: "b6ecc1cc-2175-4372-ae32-61761f66a342"). InnerVolumeSpecName "kube-api-access-9pmq2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:20:15 crc kubenswrapper[4838]: I0202 11:20:15.467981 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6ecc1cc-2175-4372-ae32-61761f66a342-scripts" (OuterVolumeSpecName: "scripts") pod "b6ecc1cc-2175-4372-ae32-61761f66a342" (UID: "b6ecc1cc-2175-4372-ae32-61761f66a342"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:20:15 crc kubenswrapper[4838]: I0202 11:20:15.525844 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6ecc1cc-2175-4372-ae32-61761f66a342-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b6ecc1cc-2175-4372-ae32-61761f66a342" (UID: "b6ecc1cc-2175-4372-ae32-61761f66a342"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:20:15 crc kubenswrapper[4838]: I0202 11:20:15.552033 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6ecc1cc-2175-4372-ae32-61761f66a342-config-data" (OuterVolumeSpecName: "config-data") pod "b6ecc1cc-2175-4372-ae32-61761f66a342" (UID: "b6ecc1cc-2175-4372-ae32-61761f66a342"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:20:15 crc kubenswrapper[4838]: I0202 11:20:15.559939 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6ecc1cc-2175-4372-ae32-61761f66a342-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:15 crc kubenswrapper[4838]: I0202 11:20:15.559980 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6ecc1cc-2175-4372-ae32-61761f66a342-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:15 crc kubenswrapper[4838]: I0202 11:20:15.559996 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9pmq2\" (UniqueName: \"kubernetes.io/projected/b6ecc1cc-2175-4372-ae32-61761f66a342-kube-api-access-9pmq2\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:15 crc kubenswrapper[4838]: I0202 11:20:15.560008 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6ecc1cc-2175-4372-ae32-61761f66a342-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:15 crc kubenswrapper[4838]: I0202 11:20:15.923069 4838 generic.go:334] "Generic (PLEG): container finished" podID="5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" containerID="9dce7b9504df09cd7e5d4028c315aaf338b1b59e11bcacc59433b30faf9f4adf" exitCode=0 Feb 02 11:20:15 crc kubenswrapper[4838]: I0202 11:20:15.923137 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844","Type":"ContainerDied","Data":"9dce7b9504df09cd7e5d4028c315aaf338b1b59e11bcacc59433b30faf9f4adf"} Feb 02 11:20:15 crc kubenswrapper[4838]: I0202 11:20:15.924923 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-9ln9l" event={"ID":"b6ecc1cc-2175-4372-ae32-61761f66a342","Type":"ContainerDied","Data":"08bf5299bded889c56457c36a306c9f5f722451a9449edc497097cefbee7c0d9"} Feb 02 11:20:15 crc kubenswrapper[4838]: I0202 11:20:15.924953 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="08bf5299bded889c56457c36a306c9f5f722451a9449edc497097cefbee7c0d9" Feb 02 11:20:15 crc kubenswrapper[4838]: I0202 11:20:15.924976 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-9ln9l" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.304389 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-lq5ql"] Feb 02 11:20:16 crc kubenswrapper[4838]: E0202 11:20:16.304821 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6ecc1cc-2175-4372-ae32-61761f66a342" containerName="nova-manage" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.304837 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6ecc1cc-2175-4372-ae32-61761f66a342" containerName="nova-manage" Feb 02 11:20:16 crc kubenswrapper[4838]: E0202 11:20:16.304854 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d6708b7-91c5-4090-8e1f-60061ca37055" containerName="neutron-httpd" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.304860 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d6708b7-91c5-4090-8e1f-60061ca37055" containerName="neutron-httpd" Feb 02 11:20:16 crc kubenswrapper[4838]: E0202 11:20:16.304876 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d6708b7-91c5-4090-8e1f-60061ca37055" containerName="neutron-api" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.304882 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d6708b7-91c5-4090-8e1f-60061ca37055" containerName="neutron-api" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.305096 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d6708b7-91c5-4090-8e1f-60061ca37055" containerName="neutron-api" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.305117 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6ecc1cc-2175-4372-ae32-61761f66a342" containerName="nova-manage" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.305136 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d6708b7-91c5-4090-8e1f-60061ca37055" containerName="neutron-httpd" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.306487 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lq5ql" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.319680 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lq5ql"] Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.376700 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mxsc\" (UniqueName: \"kubernetes.io/projected/ba3f8bb4-fdbc-4534-b20c-df6f61e4f520-kube-api-access-9mxsc\") pod \"community-operators-lq5ql\" (UID: \"ba3f8bb4-fdbc-4534-b20c-df6f61e4f520\") " pod="openshift-marketplace/community-operators-lq5ql" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.377063 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba3f8bb4-fdbc-4534-b20c-df6f61e4f520-utilities\") pod \"community-operators-lq5ql\" (UID: \"ba3f8bb4-fdbc-4534-b20c-df6f61e4f520\") " pod="openshift-marketplace/community-operators-lq5ql" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.377087 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba3f8bb4-fdbc-4534-b20c-df6f61e4f520-catalog-content\") pod \"community-operators-lq5ql\" (UID: \"ba3f8bb4-fdbc-4534-b20c-df6f61e4f520\") " pod="openshift-marketplace/community-operators-lq5ql" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.486395 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mxsc\" (UniqueName: \"kubernetes.io/projected/ba3f8bb4-fdbc-4534-b20c-df6f61e4f520-kube-api-access-9mxsc\") pod \"community-operators-lq5ql\" (UID: \"ba3f8bb4-fdbc-4534-b20c-df6f61e4f520\") " pod="openshift-marketplace/community-operators-lq5ql" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.486459 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba3f8bb4-fdbc-4534-b20c-df6f61e4f520-utilities\") pod \"community-operators-lq5ql\" (UID: \"ba3f8bb4-fdbc-4534-b20c-df6f61e4f520\") " pod="openshift-marketplace/community-operators-lq5ql" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.486485 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba3f8bb4-fdbc-4534-b20c-df6f61e4f520-catalog-content\") pod \"community-operators-lq5ql\" (UID: \"ba3f8bb4-fdbc-4534-b20c-df6f61e4f520\") " pod="openshift-marketplace/community-operators-lq5ql" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.487209 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba3f8bb4-fdbc-4534-b20c-df6f61e4f520-catalog-content\") pod \"community-operators-lq5ql\" (UID: \"ba3f8bb4-fdbc-4534-b20c-df6f61e4f520\") " pod="openshift-marketplace/community-operators-lq5ql" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.487818 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba3f8bb4-fdbc-4534-b20c-df6f61e4f520-utilities\") pod \"community-operators-lq5ql\" (UID: \"ba3f8bb4-fdbc-4534-b20c-df6f61e4f520\") " pod="openshift-marketplace/community-operators-lq5ql" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.528186 4838 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-9mxsc\" (UniqueName: \"kubernetes.io/projected/ba3f8bb4-fdbc-4534-b20c-df6f61e4f520-kube-api-access-9mxsc\") pod \"community-operators-lq5ql\" (UID: \"ba3f8bb4-fdbc-4534-b20c-df6f61e4f520\") " pod="openshift-marketplace/community-operators-lq5ql" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.572981 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.573401 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="cf592065-13f1-4594-9642-6f7c039c42ad" containerName="nova-scheduler-scheduler" containerID="cri-o://bd2f1cd35141bdbd92bb940bd14c53131e1c96a6453b532363b73553b1eba69e" gracePeriod=30 Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.640237 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lq5ql" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.663483 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.664545 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="cd9b0ea2-d076-43f8-87f9-6491b526d025" containerName="nova-metadata-log" containerID="cri-o://ba2b37934b1e825f87fd76dafdbc49dbf642370474af8bc236f4d85e3fb26e1a" gracePeriod=30 Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.665088 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="cd9b0ea2-d076-43f8-87f9-6491b526d025" containerName="nova-metadata-metadata" containerID="cri-o://d53be724496dbbd590059689260af49ff69e481140da43f2dea56654f613dc4a" gracePeriod=30 Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.694702 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.797231 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-log-httpd\") pod \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.797313 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-run-httpd\") pod \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.797373 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-sg-core-conf-yaml\") pod \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.797397 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8rqft\" (UniqueName: \"kubernetes.io/projected/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-kube-api-access-8rqft\") pod \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.797472 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-ceilometer-tls-certs\") pod \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.797504 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-config-data\") pod \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.797565 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-scripts\") pod \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.797677 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-combined-ca-bundle\") pod \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\" (UID: \"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844\") " Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.803663 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" (UID: "5b5bda7c-3bda-4cb3-a77a-e73d9c00b844"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.803924 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" (UID: "5b5bda7c-3bda-4cb3-a77a-e73d9c00b844"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.814180 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-scripts" (OuterVolumeSpecName: "scripts") pod "5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" (UID: "5b5bda7c-3bda-4cb3-a77a-e73d9c00b844"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.823333 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-kube-api-access-8rqft" (OuterVolumeSpecName: "kube-api-access-8rqft") pod "5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" (UID: "5b5bda7c-3bda-4cb3-a77a-e73d9c00b844"). InnerVolumeSpecName "kube-api-access-8rqft". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.882002 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" (UID: "5b5bda7c-3bda-4cb3-a77a-e73d9c00b844"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.911451 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.911474 4838 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.911483 4838 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.911491 4838 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.911501 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8rqft\" (UniqueName: \"kubernetes.io/projected/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-kube-api-access-8rqft\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.977775 4838 generic.go:334] "Generic (PLEG): container finished" podID="cd9b0ea2-d076-43f8-87f9-6491b526d025" containerID="ba2b37934b1e825f87fd76dafdbc49dbf642370474af8bc236f4d85e3fb26e1a" exitCode=143 Feb 02 11:20:16 crc kubenswrapper[4838]: I0202 11:20:16.978067 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"cd9b0ea2-d076-43f8-87f9-6491b526d025","Type":"ContainerDied","Data":"ba2b37934b1e825f87fd76dafdbc49dbf642370474af8bc236f4d85e3fb26e1a"} Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.045646 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5b5bda7c-3bda-4cb3-a77a-e73d9c00b844","Type":"ContainerDied","Data":"e7bc439ed8e9d1593dcf92013fd934e691bbaa9f4d9fef74e8565d98a5486051"} Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.045692 4838 scope.go:117] "RemoveContainer" containerID="f9e793113d17cc6cf2ed43d46c0e13f1ccfd3131091aefaa2512f6235f884bb4" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.045824 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.046764 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" (UID: "5b5bda7c-3bda-4cb3-a77a-e73d9c00b844"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.086763 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" (UID: "5b5bda7c-3bda-4cb3-a77a-e73d9c00b844"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.106788 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-config-data" (OuterVolumeSpecName: "config-data") pod "5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" (UID: "5b5bda7c-3bda-4cb3-a77a-e73d9c00b844"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.123935 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.123974 4838 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.123985 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.138811 4838 scope.go:117] "RemoveContainer" containerID="2cd555b5cefee893aed1fed93bd0885d7e6eb91778f7d5f88561925443d3b326" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.218818 4838 scope.go:117] "RemoveContainer" containerID="9dce7b9504df09cd7e5d4028c315aaf338b1b59e11bcacc59433b30faf9f4adf" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.285635 4838 scope.go:117] "RemoveContainer" containerID="6273287249d78e66e69e3090ecb23467d3a6564d67bca5a537a33286cb95af24" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.401284 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.421660 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lq5ql"] Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.434313 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.448659 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:20:17 crc kubenswrapper[4838]: E0202 11:20:17.449209 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" containerName="sg-core" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.449229 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" containerName="sg-core" Feb 02 11:20:17 crc kubenswrapper[4838]: E0202 11:20:17.449304 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" containerName="proxy-httpd" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.449312 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" containerName="proxy-httpd" Feb 02 11:20:17 crc kubenswrapper[4838]: E0202 11:20:17.449328 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" containerName="ceilometer-central-agent" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.449335 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" containerName="ceilometer-central-agent" Feb 02 11:20:17 crc kubenswrapper[4838]: E0202 11:20:17.449374 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" containerName="ceilometer-notification-agent" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.449385 4838 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" containerName="ceilometer-notification-agent" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.449790 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" containerName="proxy-httpd" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.449818 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" containerName="ceilometer-notification-agent" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.449828 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" containerName="ceilometer-central-agent" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.449847 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" containerName="sg-core" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.452693 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.458861 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.459039 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.458862 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.473819 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.530994 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2l99\" (UniqueName: \"kubernetes.io/projected/6397c86f-66a1-4278-a210-5fe35904b1c3-kube-api-access-d2l99\") pod \"ceilometer-0\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") " pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.531041 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") " pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.531059 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-scripts\") pod \"ceilometer-0\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") " pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.531081 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") " pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.531098 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6397c86f-66a1-4278-a210-5fe35904b1c3-run-httpd\") pod \"ceilometer-0\" (UID: 
\"6397c86f-66a1-4278-a210-5fe35904b1c3\") " pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.531150 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-config-data\") pod \"ceilometer-0\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") " pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.531163 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6397c86f-66a1-4278-a210-5fe35904b1c3-log-httpd\") pod \"ceilometer-0\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") " pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.531240 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") " pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.633875 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") " pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.636345 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2l99\" (UniqueName: \"kubernetes.io/projected/6397c86f-66a1-4278-a210-5fe35904b1c3-kube-api-access-d2l99\") pod \"ceilometer-0\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") " pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.637451 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-scripts\") pod \"ceilometer-0\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") " pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.637486 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") " pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.637545 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") " pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.637579 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6397c86f-66a1-4278-a210-5fe35904b1c3-run-httpd\") pod \"ceilometer-0\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") " pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.638949 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/6397c86f-66a1-4278-a210-5fe35904b1c3-run-httpd\") pod \"ceilometer-0\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") " pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.640299 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-config-data\") pod \"ceilometer-0\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") " pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.640327 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6397c86f-66a1-4278-a210-5fe35904b1c3-log-httpd\") pod \"ceilometer-0\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") " pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.640758 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6397c86f-66a1-4278-a210-5fe35904b1c3-log-httpd\") pod \"ceilometer-0\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") " pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.642740 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") " pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.644758 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-config-data\") pod \"ceilometer-0\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") " pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.645392 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") " pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.657276 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") " pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.663235 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2l99\" (UniqueName: \"kubernetes.io/projected/6397c86f-66a1-4278-a210-5fe35904b1c3-kube-api-access-d2l99\") pod \"ceilometer-0\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") " pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.686011 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-scripts\") pod \"ceilometer-0\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") " pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.794729 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:20:17 crc kubenswrapper[4838]: I0202 11:20:17.946987 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.053581 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/098dda3e-e72b-47f4-a2b4-b213b2710990-logs\") pod \"098dda3e-e72b-47f4-a2b4-b213b2710990\" (UID: \"098dda3e-e72b-47f4-a2b4-b213b2710990\") " Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.055470 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8qlwm\" (UniqueName: \"kubernetes.io/projected/098dda3e-e72b-47f4-a2b4-b213b2710990-kube-api-access-8qlwm\") pod \"098dda3e-e72b-47f4-a2b4-b213b2710990\" (UID: \"098dda3e-e72b-47f4-a2b4-b213b2710990\") " Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.055822 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/098dda3e-e72b-47f4-a2b4-b213b2710990-logs" (OuterVolumeSpecName: "logs") pod "098dda3e-e72b-47f4-a2b4-b213b2710990" (UID: "098dda3e-e72b-47f4-a2b4-b213b2710990"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.056171 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/098dda3e-e72b-47f4-a2b4-b213b2710990-config-data\") pod \"098dda3e-e72b-47f4-a2b4-b213b2710990\" (UID: \"098dda3e-e72b-47f4-a2b4-b213b2710990\") " Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.056284 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/098dda3e-e72b-47f4-a2b4-b213b2710990-combined-ca-bundle\") pod \"098dda3e-e72b-47f4-a2b4-b213b2710990\" (UID: \"098dda3e-e72b-47f4-a2b4-b213b2710990\") " Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.057670 4838 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/098dda3e-e72b-47f4-a2b4-b213b2710990-logs\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.061334 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/098dda3e-e72b-47f4-a2b4-b213b2710990-kube-api-access-8qlwm" (OuterVolumeSpecName: "kube-api-access-8qlwm") pod "098dda3e-e72b-47f4-a2b4-b213b2710990" (UID: "098dda3e-e72b-47f4-a2b4-b213b2710990"). InnerVolumeSpecName "kube-api-access-8qlwm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.062874 4838 generic.go:334] "Generic (PLEG): container finished" podID="098dda3e-e72b-47f4-a2b4-b213b2710990" containerID="428e64d6af02a52beee4311b8cf4444fdd2c078430cc23a25c8d145a929b03a2" exitCode=0 Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.062944 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"098dda3e-e72b-47f4-a2b4-b213b2710990","Type":"ContainerDied","Data":"428e64d6af02a52beee4311b8cf4444fdd2c078430cc23a25c8d145a929b03a2"} Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.062976 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"098dda3e-e72b-47f4-a2b4-b213b2710990","Type":"ContainerDied","Data":"020148b2e199fecc2bb3b6aed4645db960c678f5e5f0ab9057c27765e62f4e4d"} Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.062997 4838 scope.go:117] "RemoveContainer" containerID="428e64d6af02a52beee4311b8cf4444fdd2c078430cc23a25c8d145a929b03a2" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.063122 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.071400 4838 generic.go:334] "Generic (PLEG): container finished" podID="ba3f8bb4-fdbc-4534-b20c-df6f61e4f520" containerID="b4aabe4e7178839b44f6e257f70df7bbdc3d81f6766309501d4ed6d2e1482a0b" exitCode=0 Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.071456 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lq5ql" event={"ID":"ba3f8bb4-fdbc-4534-b20c-df6f61e4f520","Type":"ContainerDied","Data":"b4aabe4e7178839b44f6e257f70df7bbdc3d81f6766309501d4ed6d2e1482a0b"} Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.071488 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lq5ql" event={"ID":"ba3f8bb4-fdbc-4534-b20c-df6f61e4f520","Type":"ContainerStarted","Data":"5f2b2410b397022112f6e1683ebdba065e04fa472ea824bd4678ef27d59942f8"} Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.094160 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/098dda3e-e72b-47f4-a2b4-b213b2710990-config-data" (OuterVolumeSpecName: "config-data") pod "098dda3e-e72b-47f4-a2b4-b213b2710990" (UID: "098dda3e-e72b-47f4-a2b4-b213b2710990"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.099502 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/098dda3e-e72b-47f4-a2b4-b213b2710990-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "098dda3e-e72b-47f4-a2b4-b213b2710990" (UID: "098dda3e-e72b-47f4-a2b4-b213b2710990"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.101799 4838 scope.go:117] "RemoveContainer" containerID="3fa6906531577063158d4d8b8af272bd626a8537209c53a7be293759eaa7e132" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.131950 4838 scope.go:117] "RemoveContainer" containerID="428e64d6af02a52beee4311b8cf4444fdd2c078430cc23a25c8d145a929b03a2" Feb 02 11:20:18 crc kubenswrapper[4838]: E0202 11:20:18.133054 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"428e64d6af02a52beee4311b8cf4444fdd2c078430cc23a25c8d145a929b03a2\": container with ID starting with 428e64d6af02a52beee4311b8cf4444fdd2c078430cc23a25c8d145a929b03a2 not found: ID does not exist" containerID="428e64d6af02a52beee4311b8cf4444fdd2c078430cc23a25c8d145a929b03a2" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.133094 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"428e64d6af02a52beee4311b8cf4444fdd2c078430cc23a25c8d145a929b03a2"} err="failed to get container status \"428e64d6af02a52beee4311b8cf4444fdd2c078430cc23a25c8d145a929b03a2\": rpc error: code = NotFound desc = could not find container \"428e64d6af02a52beee4311b8cf4444fdd2c078430cc23a25c8d145a929b03a2\": container with ID starting with 428e64d6af02a52beee4311b8cf4444fdd2c078430cc23a25c8d145a929b03a2 not found: ID does not exist" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.133122 4838 scope.go:117] "RemoveContainer" containerID="3fa6906531577063158d4d8b8af272bd626a8537209c53a7be293759eaa7e132" Feb 02 11:20:18 crc kubenswrapper[4838]: E0202 11:20:18.133456 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3fa6906531577063158d4d8b8af272bd626a8537209c53a7be293759eaa7e132\": container with ID starting with 3fa6906531577063158d4d8b8af272bd626a8537209c53a7be293759eaa7e132 not found: ID does not exist" containerID="3fa6906531577063158d4d8b8af272bd626a8537209c53a7be293759eaa7e132" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.133508 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fa6906531577063158d4d8b8af272bd626a8537209c53a7be293759eaa7e132"} err="failed to get container status \"3fa6906531577063158d4d8b8af272bd626a8537209c53a7be293759eaa7e132\": rpc error: code = NotFound desc = could not find container \"3fa6906531577063158d4d8b8af272bd626a8537209c53a7be293759eaa7e132\": container with ID starting with 3fa6906531577063158d4d8b8af272bd626a8537209c53a7be293759eaa7e132 not found: ID does not exist" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.160153 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8qlwm\" (UniqueName: \"kubernetes.io/projected/098dda3e-e72b-47f4-a2b4-b213b2710990-kube-api-access-8qlwm\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.160191 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/098dda3e-e72b-47f4-a2b4-b213b2710990-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.160203 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/098dda3e-e72b-47f4-a2b4-b213b2710990-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:18 crc 
kubenswrapper[4838]: I0202 11:20:18.299515 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.405694 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.418530 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.429402 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 02 11:20:18 crc kubenswrapper[4838]: E0202 11:20:18.429931 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="098dda3e-e72b-47f4-a2b4-b213b2710990" containerName="nova-api-api" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.429951 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="098dda3e-e72b-47f4-a2b4-b213b2710990" containerName="nova-api-api" Feb 02 11:20:18 crc kubenswrapper[4838]: E0202 11:20:18.429968 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="098dda3e-e72b-47f4-a2b4-b213b2710990" containerName="nova-api-log" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.429974 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="098dda3e-e72b-47f4-a2b4-b213b2710990" containerName="nova-api-log" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.430161 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="098dda3e-e72b-47f4-a2b4-b213b2710990" containerName="nova-api-api" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.430186 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="098dda3e-e72b-47f4-a2b4-b213b2710990" containerName="nova-api-log" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.431317 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.433340 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.433411 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.440518 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.463734 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.521984 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="098dda3e-e72b-47f4-a2b4-b213b2710990" path="/var/lib/kubelet/pods/098dda3e-e72b-47f4-a2b4-b213b2710990/volumes" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.522919 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b5bda7c-3bda-4cb3-a77a-e73d9c00b844" path="/var/lib/kubelet/pods/5b5bda7c-3bda-4cb3-a77a-e73d9c00b844/volumes" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.566435 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3dc8cbc-2c9c-4192-99e3-7724d3c28c68-internal-tls-certs\") pod \"nova-api-0\" (UID: \"b3dc8cbc-2c9c-4192-99e3-7724d3c28c68\") " pod="openstack/nova-api-0" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.566709 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3dc8cbc-2c9c-4192-99e3-7724d3c28c68-config-data\") pod \"nova-api-0\" (UID: \"b3dc8cbc-2c9c-4192-99e3-7724d3c28c68\") " pod="openstack/nova-api-0" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.567666 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3dc8cbc-2c9c-4192-99e3-7724d3c28c68-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b3dc8cbc-2c9c-4192-99e3-7724d3c28c68\") " pod="openstack/nova-api-0" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.567904 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tml59\" (UniqueName: \"kubernetes.io/projected/b3dc8cbc-2c9c-4192-99e3-7724d3c28c68-kube-api-access-tml59\") pod \"nova-api-0\" (UID: \"b3dc8cbc-2c9c-4192-99e3-7724d3c28c68\") " pod="openstack/nova-api-0" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.568248 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3dc8cbc-2c9c-4192-99e3-7724d3c28c68-public-tls-certs\") pod \"nova-api-0\" (UID: \"b3dc8cbc-2c9c-4192-99e3-7724d3c28c68\") " pod="openstack/nova-api-0" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.568426 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3dc8cbc-2c9c-4192-99e3-7724d3c28c68-logs\") pod \"nova-api-0\" (UID: \"b3dc8cbc-2c9c-4192-99e3-7724d3c28c68\") " pod="openstack/nova-api-0" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.593279 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/ceilometer-0"] Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.670495 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3dc8cbc-2c9c-4192-99e3-7724d3c28c68-config-data\") pod \"nova-api-0\" (UID: \"b3dc8cbc-2c9c-4192-99e3-7724d3c28c68\") " pod="openstack/nova-api-0" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.670560 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3dc8cbc-2c9c-4192-99e3-7724d3c28c68-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b3dc8cbc-2c9c-4192-99e3-7724d3c28c68\") " pod="openstack/nova-api-0" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.670641 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tml59\" (UniqueName: \"kubernetes.io/projected/b3dc8cbc-2c9c-4192-99e3-7724d3c28c68-kube-api-access-tml59\") pod \"nova-api-0\" (UID: \"b3dc8cbc-2c9c-4192-99e3-7724d3c28c68\") " pod="openstack/nova-api-0" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.670688 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3dc8cbc-2c9c-4192-99e3-7724d3c28c68-public-tls-certs\") pod \"nova-api-0\" (UID: \"b3dc8cbc-2c9c-4192-99e3-7724d3c28c68\") " pod="openstack/nova-api-0" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.670720 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3dc8cbc-2c9c-4192-99e3-7724d3c28c68-logs\") pod \"nova-api-0\" (UID: \"b3dc8cbc-2c9c-4192-99e3-7724d3c28c68\") " pod="openstack/nova-api-0" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.670783 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3dc8cbc-2c9c-4192-99e3-7724d3c28c68-internal-tls-certs\") pod \"nova-api-0\" (UID: \"b3dc8cbc-2c9c-4192-99e3-7724d3c28c68\") " pod="openstack/nova-api-0" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.671291 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3dc8cbc-2c9c-4192-99e3-7724d3c28c68-logs\") pod \"nova-api-0\" (UID: \"b3dc8cbc-2c9c-4192-99e3-7724d3c28c68\") " pod="openstack/nova-api-0" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.675243 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3dc8cbc-2c9c-4192-99e3-7724d3c28c68-config-data\") pod \"nova-api-0\" (UID: \"b3dc8cbc-2c9c-4192-99e3-7724d3c28c68\") " pod="openstack/nova-api-0" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.675389 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3dc8cbc-2c9c-4192-99e3-7724d3c28c68-internal-tls-certs\") pod \"nova-api-0\" (UID: \"b3dc8cbc-2c9c-4192-99e3-7724d3c28c68\") " pod="openstack/nova-api-0" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.676104 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3dc8cbc-2c9c-4192-99e3-7724d3c28c68-public-tls-certs\") pod \"nova-api-0\" (UID: \"b3dc8cbc-2c9c-4192-99e3-7724d3c28c68\") " pod="openstack/nova-api-0" Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 
11:20:18.676924 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3dc8cbc-2c9c-4192-99e3-7724d3c28c68-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b3dc8cbc-2c9c-4192-99e3-7724d3c28c68\") " pod="openstack/nova-api-0"
Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.693907 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tml59\" (UniqueName: \"kubernetes.io/projected/b3dc8cbc-2c9c-4192-99e3-7724d3c28c68-kube-api-access-tml59\") pod \"nova-api-0\" (UID: \"b3dc8cbc-2c9c-4192-99e3-7724d3c28c68\") " pod="openstack/nova-api-0"
Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.735670 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-h5g6n"
Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.737702 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-h5g6n"
Feb 02 11:20:18 crc kubenswrapper[4838]: E0202 11:20:18.759910 4838 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bd2f1cd35141bdbd92bb940bd14c53131e1c96a6453b532363b73553b1eba69e is running failed: container process not found" containerID="bd2f1cd35141bdbd92bb940bd14c53131e1c96a6453b532363b73553b1eba69e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Feb 02 11:20:18 crc kubenswrapper[4838]: E0202 11:20:18.760437 4838 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bd2f1cd35141bdbd92bb940bd14c53131e1c96a6453b532363b73553b1eba69e is running failed: container process not found" containerID="bd2f1cd35141bdbd92bb940bd14c53131e1c96a6453b532363b73553b1eba69e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Feb 02 11:20:18 crc kubenswrapper[4838]: E0202 11:20:18.760853 4838 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bd2f1cd35141bdbd92bb940bd14c53131e1c96a6453b532363b73553b1eba69e is running failed: container process not found" containerID="bd2f1cd35141bdbd92bb940bd14c53131e1c96a6453b532363b73553b1eba69e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Feb 02 11:20:18 crc kubenswrapper[4838]: E0202 11:20:18.760950 4838 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bd2f1cd35141bdbd92bb940bd14c53131e1c96a6453b532363b73553b1eba69e is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="cf592065-13f1-4594-9642-6f7c039c42ad" containerName="nova-scheduler-scheduler"
Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.790820 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.818406 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-h5g6n"
Feb 02 11:20:18 crc kubenswrapper[4838]: I0202 11:20:18.952115 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.079165 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf592065-13f1-4594-9642-6f7c039c42ad-config-data\") pod \"cf592065-13f1-4594-9642-6f7c039c42ad\" (UID: \"cf592065-13f1-4594-9642-6f7c039c42ad\") "
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.079344 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bxt5g\" (UniqueName: \"kubernetes.io/projected/cf592065-13f1-4594-9642-6f7c039c42ad-kube-api-access-bxt5g\") pod \"cf592065-13f1-4594-9642-6f7c039c42ad\" (UID: \"cf592065-13f1-4594-9642-6f7c039c42ad\") "
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.079365 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf592065-13f1-4594-9642-6f7c039c42ad-combined-ca-bundle\") pod \"cf592065-13f1-4594-9642-6f7c039c42ad\" (UID: \"cf592065-13f1-4594-9642-6f7c039c42ad\") "
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.086001 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf592065-13f1-4594-9642-6f7c039c42ad-kube-api-access-bxt5g" (OuterVolumeSpecName: "kube-api-access-bxt5g") pod "cf592065-13f1-4594-9642-6f7c039c42ad" (UID: "cf592065-13f1-4594-9642-6f7c039c42ad"). InnerVolumeSpecName "kube-api-access-bxt5g". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.090173 4838 generic.go:334] "Generic (PLEG): container finished" podID="cf592065-13f1-4594-9642-6f7c039c42ad" containerID="bd2f1cd35141bdbd92bb940bd14c53131e1c96a6453b532363b73553b1eba69e" exitCode=0
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.090232 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cf592065-13f1-4594-9642-6f7c039c42ad","Type":"ContainerDied","Data":"bd2f1cd35141bdbd92bb940bd14c53131e1c96a6453b532363b73553b1eba69e"}
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.090263 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cf592065-13f1-4594-9642-6f7c039c42ad","Type":"ContainerDied","Data":"5e548a758dcbbe3cc84622c561defaca4c28e6f678650f124f8185db5d79be8f"}
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.090284 4838 scope.go:117] "RemoveContainer" containerID="bd2f1cd35141bdbd92bb940bd14c53131e1c96a6453b532363b73553b1eba69e"
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.090404 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.094695 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6397c86f-66a1-4278-a210-5fe35904b1c3","Type":"ContainerStarted","Data":"ba63162d9334b8000f7ca271e062b94069004dea745ee64a5fd075649e936f59"}
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.111846 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf592065-13f1-4594-9642-6f7c039c42ad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf592065-13f1-4594-9642-6f7c039c42ad" (UID: "cf592065-13f1-4594-9642-6f7c039c42ad"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.114360 4838 scope.go:117] "RemoveContainer" containerID="bd2f1cd35141bdbd92bb940bd14c53131e1c96a6453b532363b73553b1eba69e"
Feb 02 11:20:19 crc kubenswrapper[4838]: E0202 11:20:19.114922 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd2f1cd35141bdbd92bb940bd14c53131e1c96a6453b532363b73553b1eba69e\": container with ID starting with bd2f1cd35141bdbd92bb940bd14c53131e1c96a6453b532363b73553b1eba69e not found: ID does not exist" containerID="bd2f1cd35141bdbd92bb940bd14c53131e1c96a6453b532363b73553b1eba69e"
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.114956 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd2f1cd35141bdbd92bb940bd14c53131e1c96a6453b532363b73553b1eba69e"} err="failed to get container status \"bd2f1cd35141bdbd92bb940bd14c53131e1c96a6453b532363b73553b1eba69e\": rpc error: code = NotFound desc = could not find container \"bd2f1cd35141bdbd92bb940bd14c53131e1c96a6453b532363b73553b1eba69e\": container with ID starting with bd2f1cd35141bdbd92bb940bd14c53131e1c96a6453b532363b73553b1eba69e not found: ID does not exist"
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.125728 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf592065-13f1-4594-9642-6f7c039c42ad-config-data" (OuterVolumeSpecName: "config-data") pod "cf592065-13f1-4594-9642-6f7c039c42ad" (UID: "cf592065-13f1-4594-9642-6f7c039c42ad"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.169660 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-h5g6n"
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.181602 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf592065-13f1-4594-9642-6f7c039c42ad-config-data\") on node \"crc\" DevicePath \"\""
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.181931 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bxt5g\" (UniqueName: \"kubernetes.io/projected/cf592065-13f1-4594-9642-6f7c039c42ad-kube-api-access-bxt5g\") on node \"crc\" DevicePath \"\""
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.182014 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf592065-13f1-4594-9642-6f7c039c42ad-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.291604 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Feb 02 11:20:19 crc kubenswrapper[4838]: W0202 11:20:19.302242 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb3dc8cbc_2c9c_4192_99e3_7724d3c28c68.slice/crio-f25e2cd11e70f6efea25bbe5daf99781eabc74f273eecb1427b9cf0894e07c3c WatchSource:0}: Error finding container f25e2cd11e70f6efea25bbe5daf99781eabc74f273eecb1427b9cf0894e07c3c: Status 404 returned error can't find the container with id f25e2cd11e70f6efea25bbe5daf99781eabc74f273eecb1427b9cf0894e07c3c
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.429718 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.438494 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.471296 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Feb 02 11:20:19 crc kubenswrapper[4838]: E0202 11:20:19.471915 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf592065-13f1-4594-9642-6f7c039c42ad" containerName="nova-scheduler-scheduler"
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.471985 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf592065-13f1-4594-9642-6f7c039c42ad" containerName="nova-scheduler-scheduler"
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.472312 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf592065-13f1-4594-9642-6f7c039c42ad" containerName="nova-scheduler-scheduler"
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.473166 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.477093 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.483165 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.588609 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f864d41-d3ba-4025-9ffc-d60bb52a18b1-config-data\") pod \"nova-scheduler-0\" (UID: \"1f864d41-d3ba-4025-9ffc-d60bb52a18b1\") " pod="openstack/nova-scheduler-0"
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.588784 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xg8ft\" (UniqueName: \"kubernetes.io/projected/1f864d41-d3ba-4025-9ffc-d60bb52a18b1-kube-api-access-xg8ft\") pod \"nova-scheduler-0\" (UID: \"1f864d41-d3ba-4025-9ffc-d60bb52a18b1\") " pod="openstack/nova-scheduler-0"
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.588875 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f864d41-d3ba-4025-9ffc-d60bb52a18b1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1f864d41-d3ba-4025-9ffc-d60bb52a18b1\") " pod="openstack/nova-scheduler-0"
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.691186 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f864d41-d3ba-4025-9ffc-d60bb52a18b1-config-data\") pod \"nova-scheduler-0\" (UID: \"1f864d41-d3ba-4025-9ffc-d60bb52a18b1\") " pod="openstack/nova-scheduler-0"
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.691607 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xg8ft\" (UniqueName: \"kubernetes.io/projected/1f864d41-d3ba-4025-9ffc-d60bb52a18b1-kube-api-access-xg8ft\") pod \"nova-scheduler-0\" (UID: \"1f864d41-d3ba-4025-9ffc-d60bb52a18b1\") " pod="openstack/nova-scheduler-0"
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.691726 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f864d41-d3ba-4025-9ffc-d60bb52a18b1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1f864d41-d3ba-4025-9ffc-d60bb52a18b1\") " pod="openstack/nova-scheduler-0"
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.695074 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f864d41-d3ba-4025-9ffc-d60bb52a18b1-config-data\") pod \"nova-scheduler-0\" (UID: \"1f864d41-d3ba-4025-9ffc-d60bb52a18b1\") " pod="openstack/nova-scheduler-0"
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.695443 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f864d41-d3ba-4025-9ffc-d60bb52a18b1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1f864d41-d3ba-4025-9ffc-d60bb52a18b1\") " pod="openstack/nova-scheduler-0"
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.722191 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xg8ft\" (UniqueName: \"kubernetes.io/projected/1f864d41-d3ba-4025-9ffc-d60bb52a18b1-kube-api-access-xg8ft\") pod \"nova-scheduler-0\" (UID: \"1f864d41-d3ba-4025-9ffc-d60bb52a18b1\") " pod="openstack/nova-scheduler-0"
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.797123 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.860666 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="cd9b0ea2-d076-43f8-87f9-6491b526d025" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.216:8775/\": read tcp 10.217.0.2:50756->10.217.0.216:8775: read: connection reset by peer"
Feb 02 11:20:19 crc kubenswrapper[4838]: I0202 11:20:19.861095 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="cd9b0ea2-d076-43f8-87f9-6491b526d025" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.216:8775/\": read tcp 10.217.0.2:50770->10.217.0.216:8775: read: connection reset by peer"
Feb 02 11:20:20 crc kubenswrapper[4838]: I0202 11:20:20.118151 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b3dc8cbc-2c9c-4192-99e3-7724d3c28c68","Type":"ContainerStarted","Data":"6e02dd5d7114a0d92a98d9a1a506906eec19b1612ecbf05b710068bcb6ae5adf"}
Feb 02 11:20:20 crc kubenswrapper[4838]: I0202 11:20:20.118192 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b3dc8cbc-2c9c-4192-99e3-7724d3c28c68","Type":"ContainerStarted","Data":"f25e2cd11e70f6efea25bbe5daf99781eabc74f273eecb1427b9cf0894e07c3c"}
Feb 02 11:20:20 crc kubenswrapper[4838]: I0202 11:20:20.120072 4838 generic.go:334] "Generic (PLEG): container finished" podID="cd9b0ea2-d076-43f8-87f9-6491b526d025" containerID="d53be724496dbbd590059689260af49ff69e481140da43f2dea56654f613dc4a" exitCode=0
Feb 02 11:20:20 crc kubenswrapper[4838]: I0202 11:20:20.120114 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cd9b0ea2-d076-43f8-87f9-6491b526d025","Type":"ContainerDied","Data":"d53be724496dbbd590059689260af49ff69e481140da43f2dea56654f613dc4a"}
Feb 02 11:20:20 crc kubenswrapper[4838]: I0202 11:20:20.124213 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6397c86f-66a1-4278-a210-5fe35904b1c3","Type":"ContainerStarted","Data":"8a170125169f6889d890b414ed6f12a20911947f4575596947989e8143a87112"}
Feb 02 11:20:20 crc kubenswrapper[4838]: I0202 11:20:20.137119 4838 generic.go:334] "Generic (PLEG): container finished" podID="ba3f8bb4-fdbc-4534-b20c-df6f61e4f520" containerID="d6a5b3e1e3078525d973a08ccf0d97e428181aa8b957ddb5039e2661ddf1a500" exitCode=0
Feb 02 11:20:20 crc kubenswrapper[4838]: I0202 11:20:20.137591 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lq5ql" event={"ID":"ba3f8bb4-fdbc-4534-b20c-df6f61e4f520","Type":"ContainerDied","Data":"d6a5b3e1e3078525d973a08ccf0d97e428181aa8b957ddb5039e2661ddf1a500"}
Feb 02 11:20:20 crc kubenswrapper[4838]: I0202 11:20:20.373211 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Feb 02 11:20:20 crc kubenswrapper[4838]: I0202 11:20:20.469978 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Feb 02 11:20:20 crc kubenswrapper[4838]: I0202 11:20:20.512261 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd9b0ea2-d076-43f8-87f9-6491b526d025-logs\") pod \"cd9b0ea2-d076-43f8-87f9-6491b526d025\" (UID: \"cd9b0ea2-d076-43f8-87f9-6491b526d025\") "
Feb 02 11:20:20 crc kubenswrapper[4838]: I0202 11:20:20.512413 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qk6xk\" (UniqueName: \"kubernetes.io/projected/cd9b0ea2-d076-43f8-87f9-6491b526d025-kube-api-access-qk6xk\") pod \"cd9b0ea2-d076-43f8-87f9-6491b526d025\" (UID: \"cd9b0ea2-d076-43f8-87f9-6491b526d025\") "
Feb 02 11:20:20 crc kubenswrapper[4838]: I0202 11:20:20.512481 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd9b0ea2-d076-43f8-87f9-6491b526d025-config-data\") pod \"cd9b0ea2-d076-43f8-87f9-6491b526d025\" (UID: \"cd9b0ea2-d076-43f8-87f9-6491b526d025\") "
Feb 02 11:20:20 crc kubenswrapper[4838]: I0202 11:20:20.512586 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b0ea2-d076-43f8-87f9-6491b526d025-combined-ca-bundle\") pod \"cd9b0ea2-d076-43f8-87f9-6491b526d025\" (UID: \"cd9b0ea2-d076-43f8-87f9-6491b526d025\") "
Feb 02 11:20:20 crc kubenswrapper[4838]: I0202 11:20:20.512658 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd9b0ea2-d076-43f8-87f9-6491b526d025-nova-metadata-tls-certs\") pod \"cd9b0ea2-d076-43f8-87f9-6491b526d025\" (UID: \"cd9b0ea2-d076-43f8-87f9-6491b526d025\") "
Feb 02 11:20:20 crc kubenswrapper[4838]: I0202 11:20:20.514790 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd9b0ea2-d076-43f8-87f9-6491b526d025-logs" (OuterVolumeSpecName: "logs") pod "cd9b0ea2-d076-43f8-87f9-6491b526d025" (UID: "cd9b0ea2-d076-43f8-87f9-6491b526d025"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:20:20 crc kubenswrapper[4838]: I0202 11:20:20.535158 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf592065-13f1-4594-9642-6f7c039c42ad" path="/var/lib/kubelet/pods/cf592065-13f1-4594-9642-6f7c039c42ad/volumes"
Feb 02 11:20:20 crc kubenswrapper[4838]: I0202 11:20:20.535943 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd9b0ea2-d076-43f8-87f9-6491b526d025-kube-api-access-qk6xk" (OuterVolumeSpecName: "kube-api-access-qk6xk") pod "cd9b0ea2-d076-43f8-87f9-6491b526d025" (UID: "cd9b0ea2-d076-43f8-87f9-6491b526d025"). InnerVolumeSpecName "kube-api-access-qk6xk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:20:20 crc kubenswrapper[4838]: I0202 11:20:20.605950 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd9b0ea2-d076-43f8-87f9-6491b526d025-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cd9b0ea2-d076-43f8-87f9-6491b526d025" (UID: "cd9b0ea2-d076-43f8-87f9-6491b526d025"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:20:20 crc kubenswrapper[4838]: I0202 11:20:20.610416 4838 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod7ce26605-8dfc-48cd-a362-1a37c67ea300"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod7ce26605-8dfc-48cd-a362-1a37c67ea300] : Timed out while waiting for systemd to remove kubepods-besteffort-pod7ce26605_8dfc_48cd_a362_1a37c67ea300.slice"
Feb 02 11:20:20 crc kubenswrapper[4838]: E0202 11:20:20.610489 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod7ce26605-8dfc-48cd-a362-1a37c67ea300] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod7ce26605-8dfc-48cd-a362-1a37c67ea300] : Timed out while waiting for systemd to remove kubepods-besteffort-pod7ce26605_8dfc_48cd_a362_1a37c67ea300.slice" pod="openstack/nova-cell1-conductor-db-sync-5tpqb" podUID="7ce26605-8dfc-48cd-a362-1a37c67ea300"
Feb 02 11:20:20 crc kubenswrapper[4838]: I0202 11:20:20.620531 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qk6xk\" (UniqueName: \"kubernetes.io/projected/cd9b0ea2-d076-43f8-87f9-6491b526d025-kube-api-access-qk6xk\") on node \"crc\" DevicePath \"\""
Feb 02 11:20:20 crc kubenswrapper[4838]: I0202 11:20:20.620581 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9b0ea2-d076-43f8-87f9-6491b526d025-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 11:20:20 crc kubenswrapper[4838]: I0202 11:20:20.620594 4838 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd9b0ea2-d076-43f8-87f9-6491b526d025-logs\") on node \"crc\" DevicePath \"\""
Feb 02 11:20:20 crc kubenswrapper[4838]: I0202 11:20:20.633736 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd9b0ea2-d076-43f8-87f9-6491b526d025-config-data" (OuterVolumeSpecName: "config-data") pod "cd9b0ea2-d076-43f8-87f9-6491b526d025" (UID: "cd9b0ea2-d076-43f8-87f9-6491b526d025"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:20:20 crc kubenswrapper[4838]: I0202 11:20:20.639833 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd9b0ea2-d076-43f8-87f9-6491b526d025-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "cd9b0ea2-d076-43f8-87f9-6491b526d025" (UID: "cd9b0ea2-d076-43f8-87f9-6491b526d025"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:20:20 crc kubenswrapper[4838]: I0202 11:20:20.722037 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd9b0ea2-d076-43f8-87f9-6491b526d025-config-data\") on node \"crc\" DevicePath \"\""
Feb 02 11:20:20 crc kubenswrapper[4838]: I0202 11:20:20.722080 4838 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd9b0ea2-d076-43f8-87f9-6491b526d025-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\""
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.075236 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5g6n"]
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.159053 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b3dc8cbc-2c9c-4192-99e3-7724d3c28c68","Type":"ContainerStarted","Data":"4f0582f04a947defeab2a0137f8902f83d04433ee7745c6e4c093393a3c93e31"}
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.171277 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cd9b0ea2-d076-43f8-87f9-6491b526d025","Type":"ContainerDied","Data":"2a7abc64fa6c9bfc7fd7318477fb07b58b5a598242fdda0d24dfbddd6cfbefea"}
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.171336 4838 scope.go:117] "RemoveContainer" containerID="d53be724496dbbd590059689260af49ff69e481140da43f2dea56654f613dc4a"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.171430 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.180065 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1f864d41-d3ba-4025-9ffc-d60bb52a18b1","Type":"ContainerStarted","Data":"e3129edc318b3b3bb48aa00303aa674f1f9133028edfcfd1c00eaa61c8d2bbb2"}
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.180282 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1f864d41-d3ba-4025-9ffc-d60bb52a18b1","Type":"ContainerStarted","Data":"bd5144bd14b4a314ffdeff6a2351de55f4f14c2fb54e0f3623b69159837ee58e"}
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.188769 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6397c86f-66a1-4278-a210-5fe35904b1c3","Type":"ContainerStarted","Data":"7025f97cc39b88876ab987ec82e0421127527baf447e1c1808d55357535e587a"}
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.202207 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.202184647 podStartE2EDuration="3.202184647s" podCreationTimestamp="2026-02-02 11:20:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:20:21.184108875 +0000 UTC m=+1615.521209923" watchObservedRunningTime="2026-02-02 11:20:21.202184647 +0000 UTC m=+1615.539285675"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.212358 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.212335277 podStartE2EDuration="2.212335277s" podCreationTimestamp="2026-02-02 11:20:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:20:21.210973191 +0000 UTC m=+1615.548074229" watchObservedRunningTime="2026-02-02 11:20:21.212335277 +0000 UTC m=+1615.549436315"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.216810 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lq5ql" event={"ID":"ba3f8bb4-fdbc-4534-b20c-df6f61e4f520","Type":"ContainerStarted","Data":"3cdd611b1be2d3ebe42022994f552f691fd0f980327a4ce4b87bcfe2894d8292"}
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.217116 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-5tpqb"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.237027 4838 scope.go:117] "RemoveContainer" containerID="ba2b37934b1e825f87fd76dafdbc49dbf642370474af8bc236f4d85e3fb26e1a"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.294653 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.327454 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.339139 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Feb 02 11:20:21 crc kubenswrapper[4838]: E0202 11:20:21.339676 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd9b0ea2-d076-43f8-87f9-6491b526d025" containerName="nova-metadata-log"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.339696 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd9b0ea2-d076-43f8-87f9-6491b526d025" containerName="nova-metadata-log"
Feb 02 11:20:21 crc kubenswrapper[4838]: E0202 11:20:21.339743 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd9b0ea2-d076-43f8-87f9-6491b526d025" containerName="nova-metadata-metadata"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.339754 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd9b0ea2-d076-43f8-87f9-6491b526d025" containerName="nova-metadata-metadata"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.339957 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd9b0ea2-d076-43f8-87f9-6491b526d025" containerName="nova-metadata-log"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.339992 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd9b0ea2-d076-43f8-87f9-6491b526d025" containerName="nova-metadata-metadata"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.341078 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.343337 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.349979 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-lq5ql" podStartSLOduration=2.709591026 podStartE2EDuration="5.349959092s" podCreationTimestamp="2026-02-02 11:20:16 +0000 UTC" firstStartedPulling="2026-02-02 11:20:18.083535094 +0000 UTC m=+1612.420636122" lastFinishedPulling="2026-02-02 11:20:20.72390316 +0000 UTC m=+1615.061004188" observedRunningTime="2026-02-02 11:20:21.263473179 +0000 UTC m=+1615.600574217" watchObservedRunningTime="2026-02-02 11:20:21.349959092 +0000 UTC m=+1615.687060140"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.355313 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.368098 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.389810 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-89c5cd4d5-bjxmv"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.454596 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6d9b193-d35a-40e7-87da-b20cfaca82b4-config-data\") pod \"nova-metadata-0\" (UID: \"b6d9b193-d35a-40e7-87da-b20cfaca82b4\") " pod="openstack/nova-metadata-0"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.454724 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6d9b193-d35a-40e7-87da-b20cfaca82b4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b6d9b193-d35a-40e7-87da-b20cfaca82b4\") " pod="openstack/nova-metadata-0"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.454891 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xlzkk\" (UniqueName: \"kubernetes.io/projected/b6d9b193-d35a-40e7-87da-b20cfaca82b4-kube-api-access-xlzkk\") pod \"nova-metadata-0\" (UID: \"b6d9b193-d35a-40e7-87da-b20cfaca82b4\") " pod="openstack/nova-metadata-0"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.454943 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6d9b193-d35a-40e7-87da-b20cfaca82b4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b6d9b193-d35a-40e7-87da-b20cfaca82b4\") " pod="openstack/nova-metadata-0"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.455072 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6d9b193-d35a-40e7-87da-b20cfaca82b4-logs\") pod \"nova-metadata-0\" (UID: \"b6d9b193-d35a-40e7-87da-b20cfaca82b4\") " pod="openstack/nova-metadata-0"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.472908 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-5ww45"]
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.473149 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-757b4f8459-5ww45" podUID="3c81155e-9881-4ed2-bcec-7035aed80588" containerName="dnsmasq-dns" containerID="cri-o://eb7c61580160aafc830c2316da7aa59476cc37a6b075e760c58e1eb752864b0f" gracePeriod=10
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.516106 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-757b4f8459-5ww45" podUID="3c81155e-9881-4ed2-bcec-7035aed80588" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.206:5353: connect: connection refused"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.557107 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6d9b193-d35a-40e7-87da-b20cfaca82b4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b6d9b193-d35a-40e7-87da-b20cfaca82b4\") " pod="openstack/nova-metadata-0"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.557519 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xlzkk\" (UniqueName: \"kubernetes.io/projected/b6d9b193-d35a-40e7-87da-b20cfaca82b4-kube-api-access-xlzkk\") pod \"nova-metadata-0\" (UID: \"b6d9b193-d35a-40e7-87da-b20cfaca82b4\") " pod="openstack/nova-metadata-0"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.557558 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6d9b193-d35a-40e7-87da-b20cfaca82b4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b6d9b193-d35a-40e7-87da-b20cfaca82b4\") " pod="openstack/nova-metadata-0"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.557675 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6d9b193-d35a-40e7-87da-b20cfaca82b4-logs\") pod \"nova-metadata-0\" (UID: \"b6d9b193-d35a-40e7-87da-b20cfaca82b4\") " pod="openstack/nova-metadata-0"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.558080 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6d9b193-d35a-40e7-87da-b20cfaca82b4-logs\") pod \"nova-metadata-0\" (UID: \"b6d9b193-d35a-40e7-87da-b20cfaca82b4\") " pod="openstack/nova-metadata-0"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.558323 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6d9b193-d35a-40e7-87da-b20cfaca82b4-config-data\") pod \"nova-metadata-0\" (UID: \"b6d9b193-d35a-40e7-87da-b20cfaca82b4\") " pod="openstack/nova-metadata-0"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.567310 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6d9b193-d35a-40e7-87da-b20cfaca82b4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b6d9b193-d35a-40e7-87da-b20cfaca82b4\") " pod="openstack/nova-metadata-0"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.567640 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6d9b193-d35a-40e7-87da-b20cfaca82b4-config-data\") pod \"nova-metadata-0\" (UID: \"b6d9b193-d35a-40e7-87da-b20cfaca82b4\") " pod="openstack/nova-metadata-0"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.570020 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6d9b193-d35a-40e7-87da-b20cfaca82b4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b6d9b193-d35a-40e7-87da-b20cfaca82b4\") " pod="openstack/nova-metadata-0"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.585235 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xlzkk\" (UniqueName: \"kubernetes.io/projected/b6d9b193-d35a-40e7-87da-b20cfaca82b4-kube-api-access-xlzkk\") pod \"nova-metadata-0\" (UID: \"b6d9b193-d35a-40e7-87da-b20cfaca82b4\") " pod="openstack/nova-metadata-0"
Feb 02 11:20:21 crc kubenswrapper[4838]: I0202 11:20:21.663114 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.165834 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-5ww45"
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.242606 4838 generic.go:334] "Generic (PLEG): container finished" podID="3c81155e-9881-4ed2-bcec-7035aed80588" containerID="eb7c61580160aafc830c2316da7aa59476cc37a6b075e760c58e1eb752864b0f" exitCode=0
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.242678 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-5ww45" event={"ID":"3c81155e-9881-4ed2-bcec-7035aed80588","Type":"ContainerDied","Data":"eb7c61580160aafc830c2316da7aa59476cc37a6b075e760c58e1eb752864b0f"}
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.242706 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-5ww45" event={"ID":"3c81155e-9881-4ed2-bcec-7035aed80588","Type":"ContainerDied","Data":"298b4e08c6354f1cbc9450ad53d80971787adf9db2b8890bb064087d492702ff"}
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.242724 4838 scope.go:117] "RemoveContainer" containerID="eb7c61580160aafc830c2316da7aa59476cc37a6b075e760c58e1eb752864b0f"
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.242857 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-5ww45"
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.249262 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-h5g6n" podUID="6cdce19a-2872-4dea-94d1-6497f104c890" containerName="registry-server" containerID="cri-o://634a64b28efa5eef64a92fa766c9c061426043b7409474f068724d0a5b70caf3" gracePeriod=2
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.277583 4838 scope.go:117] "RemoveContainer" containerID="a53f9634b4ee823444d4cca2daffa6f3d61b1f6290df168d42f2c024cb8c37d1"
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.284969 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-ovsdbserver-sb\") pod \"3c81155e-9881-4ed2-bcec-7035aed80588\" (UID: \"3c81155e-9881-4ed2-bcec-7035aed80588\") "
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.285071 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gzs5j\" (UniqueName: \"kubernetes.io/projected/3c81155e-9881-4ed2-bcec-7035aed80588-kube-api-access-gzs5j\") pod \"3c81155e-9881-4ed2-bcec-7035aed80588\" (UID: \"3c81155e-9881-4ed2-bcec-7035aed80588\") "
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.285181 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-dns-swift-storage-0\") pod \"3c81155e-9881-4ed2-bcec-7035aed80588\" (UID: \"3c81155e-9881-4ed2-bcec-7035aed80588\") "
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.285346 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-dns-svc\") pod \"3c81155e-9881-4ed2-bcec-7035aed80588\" (UID: \"3c81155e-9881-4ed2-bcec-7035aed80588\") "
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.285374 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-config\") pod \"3c81155e-9881-4ed2-bcec-7035aed80588\" (UID: \"3c81155e-9881-4ed2-bcec-7035aed80588\") "
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.285469 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-ovsdbserver-nb\") pod \"3c81155e-9881-4ed2-bcec-7035aed80588\" (UID: \"3c81155e-9881-4ed2-bcec-7035aed80588\") "
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.309880 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c81155e-9881-4ed2-bcec-7035aed80588-kube-api-access-gzs5j" (OuterVolumeSpecName: "kube-api-access-gzs5j") pod "3c81155e-9881-4ed2-bcec-7035aed80588" (UID: "3c81155e-9881-4ed2-bcec-7035aed80588"). InnerVolumeSpecName "kube-api-access-gzs5j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.329542 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.340700 4838 scope.go:117] "RemoveContainer" containerID="eb7c61580160aafc830c2316da7aa59476cc37a6b075e760c58e1eb752864b0f"
Feb 02 11:20:22 crc kubenswrapper[4838]: E0202 11:20:22.344123 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb7c61580160aafc830c2316da7aa59476cc37a6b075e760c58e1eb752864b0f\": container with ID starting with eb7c61580160aafc830c2316da7aa59476cc37a6b075e760c58e1eb752864b0f not found: ID does not exist" containerID="eb7c61580160aafc830c2316da7aa59476cc37a6b075e760c58e1eb752864b0f"
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.344157 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb7c61580160aafc830c2316da7aa59476cc37a6b075e760c58e1eb752864b0f"} err="failed to get container status \"eb7c61580160aafc830c2316da7aa59476cc37a6b075e760c58e1eb752864b0f\": rpc error: code = NotFound desc = could not find container \"eb7c61580160aafc830c2316da7aa59476cc37a6b075e760c58e1eb752864b0f\": container with ID starting with eb7c61580160aafc830c2316da7aa59476cc37a6b075e760c58e1eb752864b0f not found: ID does not exist"
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.344185 4838 scope.go:117] "RemoveContainer" containerID="a53f9634b4ee823444d4cca2daffa6f3d61b1f6290df168d42f2c024cb8c37d1"
Feb 02 11:20:22 crc kubenswrapper[4838]: E0202 11:20:22.344490 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a53f9634b4ee823444d4cca2daffa6f3d61b1f6290df168d42f2c024cb8c37d1\": container with ID starting with a53f9634b4ee823444d4cca2daffa6f3d61b1f6290df168d42f2c024cb8c37d1 not found: ID does not exist" containerID="a53f9634b4ee823444d4cca2daffa6f3d61b1f6290df168d42f2c024cb8c37d1"
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.344516 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a53f9634b4ee823444d4cca2daffa6f3d61b1f6290df168d42f2c024cb8c37d1"} err="failed to get container status \"a53f9634b4ee823444d4cca2daffa6f3d61b1f6290df168d42f2c024cb8c37d1\": rpc error: code = NotFound desc = could not find container \"a53f9634b4ee823444d4cca2daffa6f3d61b1f6290df168d42f2c024cb8c37d1\": container with ID starting with a53f9634b4ee823444d4cca2daffa6f3d61b1f6290df168d42f2c024cb8c37d1 not found: ID does not exist"
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.387718 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gzs5j\" (UniqueName: \"kubernetes.io/projected/3c81155e-9881-4ed2-bcec-7035aed80588-kube-api-access-gzs5j\") on node \"crc\" DevicePath \"\""
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.422164 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3c81155e-9881-4ed2-bcec-7035aed80588" (UID: "3c81155e-9881-4ed2-bcec-7035aed80588"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.430502 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "3c81155e-9881-4ed2-bcec-7035aed80588" (UID: "3c81155e-9881-4ed2-bcec-7035aed80588"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.432759 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-config" (OuterVolumeSpecName: "config") pod "3c81155e-9881-4ed2-bcec-7035aed80588" (UID: "3c81155e-9881-4ed2-bcec-7035aed80588"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.459074 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3c81155e-9881-4ed2-bcec-7035aed80588" (UID: "3c81155e-9881-4ed2-bcec-7035aed80588"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.459915 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3c81155e-9881-4ed2-bcec-7035aed80588" (UID: "3c81155e-9881-4ed2-bcec-7035aed80588"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.489334 4838 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-dns-svc\") on node \"crc\" DevicePath \"\""
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.489380 4838 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-config\") on node \"crc\" DevicePath \"\""
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.489395 4838 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.489407 4838 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.489419 4838 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3c81155e-9881-4ed2-bcec-7035aed80588-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.528962 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd9b0ea2-d076-43f8-87f9-6491b526d025" path="/var/lib/kubelet/pods/cd9b0ea2-d076-43f8-87f9-6491b526d025/volumes"
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.856510 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-5ww45"]
Feb 02 11:20:22 crc kubenswrapper[4838]: I0202 11:20:22.866077 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-5ww45"]
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.013351 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h5g6n"
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.110750 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkc8m\" (UniqueName: \"kubernetes.io/projected/6cdce19a-2872-4dea-94d1-6497f104c890-kube-api-access-jkc8m\") pod \"6cdce19a-2872-4dea-94d1-6497f104c890\" (UID: \"6cdce19a-2872-4dea-94d1-6497f104c890\") "
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.110820 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cdce19a-2872-4dea-94d1-6497f104c890-catalog-content\") pod \"6cdce19a-2872-4dea-94d1-6497f104c890\" (UID: \"6cdce19a-2872-4dea-94d1-6497f104c890\") "
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.110892 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cdce19a-2872-4dea-94d1-6497f104c890-utilities\") pod \"6cdce19a-2872-4dea-94d1-6497f104c890\" (UID: \"6cdce19a-2872-4dea-94d1-6497f104c890\") "
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.112717 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6cdce19a-2872-4dea-94d1-6497f104c890-utilities" (OuterVolumeSpecName: "utilities") pod "6cdce19a-2872-4dea-94d1-6497f104c890" (UID: "6cdce19a-2872-4dea-94d1-6497f104c890"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.141898 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6cdce19a-2872-4dea-94d1-6497f104c890-kube-api-access-jkc8m" (OuterVolumeSpecName: "kube-api-access-jkc8m") pod "6cdce19a-2872-4dea-94d1-6497f104c890" (UID: "6cdce19a-2872-4dea-94d1-6497f104c890"). InnerVolumeSpecName "kube-api-access-jkc8m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.183985 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6cdce19a-2872-4dea-94d1-6497f104c890-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6cdce19a-2872-4dea-94d1-6497f104c890" (UID: "6cdce19a-2872-4dea-94d1-6497f104c890"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.218372 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkc8m\" (UniqueName: \"kubernetes.io/projected/6cdce19a-2872-4dea-94d1-6497f104c890-kube-api-access-jkc8m\") on node \"crc\" DevicePath \"\""
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.218416 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cdce19a-2872-4dea-94d1-6497f104c890-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.218432 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cdce19a-2872-4dea-94d1-6497f104c890-utilities\") on node \"crc\" DevicePath \"\""
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.266120 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6397c86f-66a1-4278-a210-5fe35904b1c3","Type":"ContainerStarted","Data":"d0fa955a9ca356f326a1cd98a9ff26f71d9853776d5e5b5c50516debda927a36"}
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.270119 4838 generic.go:334] "Generic (PLEG): container finished" podID="6cdce19a-2872-4dea-94d1-6497f104c890" containerID="634a64b28efa5eef64a92fa766c9c061426043b7409474f068724d0a5b70caf3" exitCode=0
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.270224 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5g6n" event={"ID":"6cdce19a-2872-4dea-94d1-6497f104c890","Type":"ContainerDied","Data":"634a64b28efa5eef64a92fa766c9c061426043b7409474f068724d0a5b70caf3"}
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.270280 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5g6n" event={"ID":"6cdce19a-2872-4dea-94d1-6497f104c890","Type":"ContainerDied","Data":"c376051b3171964e92c98859670a1f2524b04e107f2f3c2a4bffae33a24c5a92"}
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.270330 4838 scope.go:117] "RemoveContainer" containerID="634a64b28efa5eef64a92fa766c9c061426043b7409474f068724d0a5b70caf3"
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.270509 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h5g6n"
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.280120 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b6d9b193-d35a-40e7-87da-b20cfaca82b4","Type":"ContainerStarted","Data":"360a5607767e3d3e97ce6ce81ed71548e27c4415b186f92c566ae54b7a8cdfe4"}
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.280167 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b6d9b193-d35a-40e7-87da-b20cfaca82b4","Type":"ContainerStarted","Data":"36845134d3a4d2f0b8d900c1cf1ce071a74a58b5b1466b4fa94762e0c1735589"}
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.280180 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b6d9b193-d35a-40e7-87da-b20cfaca82b4","Type":"ContainerStarted","Data":"cd8c004fd97c281e35845e311ded48d1c4fc08c2eb89ad5b8f2e3387ef455700"}
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.305606 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.305589072 podStartE2EDuration="2.305589072s" podCreationTimestamp="2026-02-02 11:20:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:20:23.303183688 +0000 UTC m=+1617.640284716" watchObservedRunningTime="2026-02-02 11:20:23.305589072 +0000 UTC m=+1617.642690100"
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.311886 4838 scope.go:117] "RemoveContainer" containerID="8f20d1af6b82b75cb1f8f1d66396d964d1719d147821610773ef9f61124023ac"
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.343143 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5g6n"]
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.367296 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5g6n"]
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.370841 4838 scope.go:117] "RemoveContainer" containerID="bce426557a516289d643aee246d7e09b68780da1b5f0758cea76f4b93b9dbe71"
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.406600 4838 scope.go:117] "RemoveContainer" containerID="634a64b28efa5eef64a92fa766c9c061426043b7409474f068724d0a5b70caf3"
Feb 02 11:20:23 crc kubenswrapper[4838]: E0202 11:20:23.407031 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"634a64b28efa5eef64a92fa766c9c061426043b7409474f068724d0a5b70caf3\": container with ID starting with 634a64b28efa5eef64a92fa766c9c061426043b7409474f068724d0a5b70caf3 not found: ID does not exist" containerID="634a64b28efa5eef64a92fa766c9c061426043b7409474f068724d0a5b70caf3"
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.407064 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"634a64b28efa5eef64a92fa766c9c061426043b7409474f068724d0a5b70caf3"} err="failed to get container status \"634a64b28efa5eef64a92fa766c9c061426043b7409474f068724d0a5b70caf3\": rpc error: code = NotFound desc = could not find container \"634a64b28efa5eef64a92fa766c9c061426043b7409474f068724d0a5b70caf3\": container with ID starting with 634a64b28efa5eef64a92fa766c9c061426043b7409474f068724d0a5b70caf3 not found: ID does not exist"
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.407087 4838 scope.go:117] "RemoveContainer" containerID="8f20d1af6b82b75cb1f8f1d66396d964d1719d147821610773ef9f61124023ac"
Feb 02 11:20:23 crc kubenswrapper[4838]: E0202 11:20:23.407458 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f20d1af6b82b75cb1f8f1d66396d964d1719d147821610773ef9f61124023ac\": container with ID starting with 8f20d1af6b82b75cb1f8f1d66396d964d1719d147821610773ef9f61124023ac not found: ID does not exist" containerID="8f20d1af6b82b75cb1f8f1d66396d964d1719d147821610773ef9f61124023ac"
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.407477 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f20d1af6b82b75cb1f8f1d66396d964d1719d147821610773ef9f61124023ac"} err="failed to get container status \"8f20d1af6b82b75cb1f8f1d66396d964d1719d147821610773ef9f61124023ac\": rpc error: code = NotFound desc = could not find container \"8f20d1af6b82b75cb1f8f1d66396d964d1719d147821610773ef9f61124023ac\": container with ID starting with 8f20d1af6b82b75cb1f8f1d66396d964d1719d147821610773ef9f61124023ac not found: ID does not exist"
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.407489 4838 scope.go:117] "RemoveContainer" containerID="bce426557a516289d643aee246d7e09b68780da1b5f0758cea76f4b93b9dbe71"
Feb 02 11:20:23 crc kubenswrapper[4838]: E0202 11:20:23.407763 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bce426557a516289d643aee246d7e09b68780da1b5f0758cea76f4b93b9dbe71\": container with ID starting with bce426557a516289d643aee246d7e09b68780da1b5f0758cea76f4b93b9dbe71 not found: ID does not exist" containerID="bce426557a516289d643aee246d7e09b68780da1b5f0758cea76f4b93b9dbe71"
Feb 02 11:20:23 crc kubenswrapper[4838]: I0202 11:20:23.407794 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bce426557a516289d643aee246d7e09b68780da1b5f0758cea76f4b93b9dbe71"} err="failed to get container status \"bce426557a516289d643aee246d7e09b68780da1b5f0758cea76f4b93b9dbe71\": rpc error: code = NotFound desc = could not find container \"bce426557a516289d643aee246d7e09b68780da1b5f0758cea76f4b93b9dbe71\": container with ID starting with bce426557a516289d643aee246d7e09b68780da1b5f0758cea76f4b93b9dbe71 not found: ID does not exist"
Feb 02 11:20:24 crc kubenswrapper[4838]: I0202 11:20:24.517526 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c81155e-9881-4ed2-bcec-7035aed80588" path="/var/lib/kubelet/pods/3c81155e-9881-4ed2-bcec-7035aed80588/volumes"
Feb 02 11:20:24 crc kubenswrapper[4838]: I0202 11:20:24.518307 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6cdce19a-2872-4dea-94d1-6497f104c890" path="/var/lib/kubelet/pods/6cdce19a-2872-4dea-94d1-6497f104c890/volumes"
Feb 02 11:20:24 crc kubenswrapper[4838]: I0202 11:20:24.797784 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Feb 02 11:20:26 crc kubenswrapper[4838]: I0202 11:20:26.641678 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-lq5ql"
Feb 02 11:20:26 crc kubenswrapper[4838]: I0202 11:20:26.642348 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-lq5ql"
Feb 02 11:20:26 crc kubenswrapper[4838]: I0202 11:20:26.664105 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Feb 02 11:20:26 crc kubenswrapper[4838]: I0202 11:20:26.665179 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Feb 02 11:20:26 crc kubenswrapper[4838]: I0202 11:20:26.699423 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-lq5ql"
Feb 02 11:20:27 crc kubenswrapper[4838]: I0202 11:20:27.417671 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-lq5ql"
Feb 02 11:20:27 crc kubenswrapper[4838]: I0202 11:20:27.872233 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lq5ql"]
Feb 02 11:20:28 crc kubenswrapper[4838]: I0202 11:20:28.358255 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6397c86f-66a1-4278-a210-5fe35904b1c3","Type":"ContainerStarted","Data":"643dfe2edc39986ca84bf00f679a75915b46a09cd137f1106c7cd0af4f901ab2"}
Feb 02 11:20:28 crc kubenswrapper[4838]: I0202 11:20:28.358445 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6397c86f-66a1-4278-a210-5fe35904b1c3" containerName="ceilometer-central-agent" containerID="cri-o://8a170125169f6889d890b414ed6f12a20911947f4575596947989e8143a87112" gracePeriod=30
Feb 02 11:20:28 crc kubenswrapper[4838]: I0202 11:20:28.359041 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6397c86f-66a1-4278-a210-5fe35904b1c3" containerName="proxy-httpd" containerID="cri-o://643dfe2edc39986ca84bf00f679a75915b46a09cd137f1106c7cd0af4f901ab2" gracePeriod=30
Feb 02 11:20:28 crc kubenswrapper[4838]: I0202 11:20:28.359110 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6397c86f-66a1-4278-a210-5fe35904b1c3" containerName="sg-core" containerID="cri-o://d0fa955a9ca356f326a1cd98a9ff26f71d9853776d5e5b5c50516debda927a36" gracePeriod=30
Feb 02 11:20:28 crc kubenswrapper[4838]: I0202 11:20:28.359153 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6397c86f-66a1-4278-a210-5fe35904b1c3" containerName="ceilometer-notification-agent" containerID="cri-o://7025f97cc39b88876ab987ec82e0421127527baf447e1c1808d55357535e587a" gracePeriod=30
Feb 02 11:20:28 crc kubenswrapper[4838]: I0202 11:20:28.401071 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.573876053 podStartE2EDuration="11.40105078s" podCreationTimestamp="2026-02-02 11:20:17 +0000 UTC" firstStartedPulling="2026-02-02 11:20:18.309214504 +0000 UTC m=+1612.646315532" lastFinishedPulling="2026-02-02 11:20:27.136389221 +0000 UTC m=+1621.473490259" observedRunningTime="2026-02-02 11:20:28.399051757 +0000 UTC m=+1622.736152805" watchObservedRunningTime="2026-02-02 11:20:28.40105078 +0000 UTC m=+1622.738151808"
Feb 02 11:20:28 crc kubenswrapper[4838]: I0202 11:20:28.792116 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Feb 02 11:20:28 crc kubenswrapper[4838]: I0202 11:20:28.792171 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Feb 02 11:20:29 crc kubenswrapper[4838]: I0202 11:20:29.369238 4838 generic.go:334] "Generic (PLEG): container finished" podID="6397c86f-66a1-4278-a210-5fe35904b1c3" containerID="643dfe2edc39986ca84bf00f679a75915b46a09cd137f1106c7cd0af4f901ab2" exitCode=0
Feb 02 11:20:29 crc kubenswrapper[4838]: I0202 11:20:29.369271 4838 generic.go:334] "Generic (PLEG): container finished" podID="6397c86f-66a1-4278-a210-5fe35904b1c3" containerID="d0fa955a9ca356f326a1cd98a9ff26f71d9853776d5e5b5c50516debda927a36" exitCode=2
Feb 02 11:20:29 crc kubenswrapper[4838]: I0202 11:20:29.369282 4838 generic.go:334] "Generic (PLEG): container finished" podID="6397c86f-66a1-4278-a210-5fe35904b1c3" containerID="7025f97cc39b88876ab987ec82e0421127527baf447e1c1808d55357535e587a" exitCode=0
Feb 02 11:20:29 crc kubenswrapper[4838]: I0202 11:20:29.369305 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6397c86f-66a1-4278-a210-5fe35904b1c3","Type":"ContainerDied","Data":"643dfe2edc39986ca84bf00f679a75915b46a09cd137f1106c7cd0af4f901ab2"}
Feb 02 11:20:29 crc kubenswrapper[4838]: I0202 11:20:29.369344 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6397c86f-66a1-4278-a210-5fe35904b1c3","Type":"ContainerDied","Data":"d0fa955a9ca356f326a1cd98a9ff26f71d9853776d5e5b5c50516debda927a36"}
Feb 02 11:20:29 crc kubenswrapper[4838]: I0202 11:20:29.369360 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6397c86f-66a1-4278-a210-5fe35904b1c3","Type":"ContainerDied","Data":"7025f97cc39b88876ab987ec82e0421127527baf447e1c1808d55357535e587a"}
Feb 02 11:20:29 crc kubenswrapper[4838]: I0202 11:20:29.369485 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-lq5ql" podUID="ba3f8bb4-fdbc-4534-b20c-df6f61e4f520" containerName="registry-server" containerID="cri-o://3cdd611b1be2d3ebe42022994f552f691fd0f980327a4ce4b87bcfe2894d8292" gracePeriod=2
Feb 02 11:20:29 crc kubenswrapper[4838]: I0202 11:20:29.798418 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Feb 02 11:20:29 crc kubenswrapper[4838]: I0202 11:20:29.803884 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b3dc8cbc-2c9c-4192-99e3-7724d3c28c68" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.223:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Feb 02 11:20:29 crc kubenswrapper[4838]: I0202 11:20:29.803884 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b3dc8cbc-2c9c-4192-99e3-7724d3c28c68" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.223:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Feb 02 11:20:29 crc kubenswrapper[4838]: I0202 11:20:29.825656 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Feb 02 11:20:30 crc kubenswrapper[4838]: I0202 11:20:30.425057 4838 generic.go:334] "Generic (PLEG): container finished" podID="ba3f8bb4-fdbc-4534-b20c-df6f61e4f520" containerID="3cdd611b1be2d3ebe42022994f552f691fd0f980327a4ce4b87bcfe2894d8292" exitCode=0
Feb 02 11:20:30 crc kubenswrapper[4838]: I0202 11:20:30.425719 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lq5ql" event={"ID":"ba3f8bb4-fdbc-4534-b20c-df6f61e4f520","Type":"ContainerDied","Data":"3cdd611b1be2d3ebe42022994f552f691fd0f980327a4ce4b87bcfe2894d8292"}
Feb
Feb 02 11:20:30 crc kubenswrapper[4838]: I0202 11:20:30.425809 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lq5ql" event={"ID":"ba3f8bb4-fdbc-4534-b20c-df6f61e4f520","Type":"ContainerDied","Data":"5f2b2410b397022112f6e1683ebdba065e04fa472ea824bd4678ef27d59942f8"}
Feb 02 11:20:30 crc kubenswrapper[4838]: I0202 11:20:30.425820 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f2b2410b397022112f6e1683ebdba065e04fa472ea824bd4678ef27d59942f8"
Feb 02 11:20:30 crc kubenswrapper[4838]: I0202 11:20:30.459929 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Feb 02 11:20:30 crc kubenswrapper[4838]: I0202 11:20:30.522877 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lq5ql"
Feb 02 11:20:30 crc kubenswrapper[4838]: I0202 11:20:30.571747 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba3f8bb4-fdbc-4534-b20c-df6f61e4f520-catalog-content\") pod \"ba3f8bb4-fdbc-4534-b20c-df6f61e4f520\" (UID: \"ba3f8bb4-fdbc-4534-b20c-df6f61e4f520\") "
Feb 02 11:20:30 crc kubenswrapper[4838]: I0202 11:20:30.571836 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba3f8bb4-fdbc-4534-b20c-df6f61e4f520-utilities\") pod \"ba3f8bb4-fdbc-4534-b20c-df6f61e4f520\" (UID: \"ba3f8bb4-fdbc-4534-b20c-df6f61e4f520\") "
Feb 02 11:20:30 crc kubenswrapper[4838]: I0202 11:20:30.571963 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9mxsc\" (UniqueName: \"kubernetes.io/projected/ba3f8bb4-fdbc-4534-b20c-df6f61e4f520-kube-api-access-9mxsc\") pod \"ba3f8bb4-fdbc-4534-b20c-df6f61e4f520\" (UID: \"ba3f8bb4-fdbc-4534-b20c-df6f61e4f520\") "
Feb 02 11:20:30 crc kubenswrapper[4838]: I0202 11:20:30.572763 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba3f8bb4-fdbc-4534-b20c-df6f61e4f520-utilities" (OuterVolumeSpecName: "utilities") pod "ba3f8bb4-fdbc-4534-b20c-df6f61e4f520" (UID: "ba3f8bb4-fdbc-4534-b20c-df6f61e4f520"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:20:30 crc kubenswrapper[4838]: I0202 11:20:30.587142 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba3f8bb4-fdbc-4534-b20c-df6f61e4f520-kube-api-access-9mxsc" (OuterVolumeSpecName: "kube-api-access-9mxsc") pod "ba3f8bb4-fdbc-4534-b20c-df6f61e4f520" (UID: "ba3f8bb4-fdbc-4534-b20c-df6f61e4f520"). InnerVolumeSpecName "kube-api-access-9mxsc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:20:30 crc kubenswrapper[4838]: I0202 11:20:30.644276 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba3f8bb4-fdbc-4534-b20c-df6f61e4f520-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ba3f8bb4-fdbc-4534-b20c-df6f61e4f520" (UID: "ba3f8bb4-fdbc-4534-b20c-df6f61e4f520"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:20:30 crc kubenswrapper[4838]: I0202 11:20:30.675525 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9mxsc\" (UniqueName: \"kubernetes.io/projected/ba3f8bb4-fdbc-4534-b20c-df6f61e4f520-kube-api-access-9mxsc\") on node \"crc\" DevicePath \"\""
Feb 02 11:20:30 crc kubenswrapper[4838]: I0202 11:20:30.675638 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba3f8bb4-fdbc-4534-b20c-df6f61e4f520-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 02 11:20:30 crc kubenswrapper[4838]: I0202 11:20:30.675658 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba3f8bb4-fdbc-4534-b20c-df6f61e4f520-utilities\") on node \"crc\" DevicePath \"\""
Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.327023 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.389541 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6397c86f-66a1-4278-a210-5fe35904b1c3-run-httpd\") pod \"6397c86f-66a1-4278-a210-5fe35904b1c3\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") "
Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.389648 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-scripts\") pod \"6397c86f-66a1-4278-a210-5fe35904b1c3\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") "
Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.389757 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6397c86f-66a1-4278-a210-5fe35904b1c3-log-httpd\") pod \"6397c86f-66a1-4278-a210-5fe35904b1c3\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") "
Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.389994 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6397c86f-66a1-4278-a210-5fe35904b1c3-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "6397c86f-66a1-4278-a210-5fe35904b1c3" (UID: "6397c86f-66a1-4278-a210-5fe35904b1c3"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.390009 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-ceilometer-tls-certs\") pod \"6397c86f-66a1-4278-a210-5fe35904b1c3\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") "
Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.390086 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6397c86f-66a1-4278-a210-5fe35904b1c3-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "6397c86f-66a1-4278-a210-5fe35904b1c3" (UID: "6397c86f-66a1-4278-a210-5fe35904b1c3"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.390106 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-config-data\") pod \"6397c86f-66a1-4278-a210-5fe35904b1c3\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") "
Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.390233 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-combined-ca-bundle\") pod \"6397c86f-66a1-4278-a210-5fe35904b1c3\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") "
Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.390271 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d2l99\" (UniqueName: \"kubernetes.io/projected/6397c86f-66a1-4278-a210-5fe35904b1c3-kube-api-access-d2l99\") pod \"6397c86f-66a1-4278-a210-5fe35904b1c3\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") "
Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.390345 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-sg-core-conf-yaml\") pod \"6397c86f-66a1-4278-a210-5fe35904b1c3\" (UID: \"6397c86f-66a1-4278-a210-5fe35904b1c3\") "
Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.391242 4838 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6397c86f-66a1-4278-a210-5fe35904b1c3-run-httpd\") on node \"crc\" DevicePath \"\""
Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.391268 4838 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6397c86f-66a1-4278-a210-5fe35904b1c3-log-httpd\") on node \"crc\" DevicePath \"\""
Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.396479 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6397c86f-66a1-4278-a210-5fe35904b1c3-kube-api-access-d2l99" (OuterVolumeSpecName: "kube-api-access-d2l99") pod "6397c86f-66a1-4278-a210-5fe35904b1c3" (UID: "6397c86f-66a1-4278-a210-5fe35904b1c3"). InnerVolumeSpecName "kube-api-access-d2l99". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.398060 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-scripts" (OuterVolumeSpecName: "scripts") pod "6397c86f-66a1-4278-a210-5fe35904b1c3" (UID: "6397c86f-66a1-4278-a210-5fe35904b1c3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.470686 4838 generic.go:334] "Generic (PLEG): container finished" podID="6397c86f-66a1-4278-a210-5fe35904b1c3" containerID="8a170125169f6889d890b414ed6f12a20911947f4575596947989e8143a87112" exitCode=0 Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.470818 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6397c86f-66a1-4278-a210-5fe35904b1c3","Type":"ContainerDied","Data":"8a170125169f6889d890b414ed6f12a20911947f4575596947989e8143a87112"} Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.470871 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6397c86f-66a1-4278-a210-5fe35904b1c3","Type":"ContainerDied","Data":"ba63162d9334b8000f7ca271e062b94069004dea745ee64a5fd075649e936f59"} Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.470889 4838 scope.go:117] "RemoveContainer" containerID="643dfe2edc39986ca84bf00f679a75915b46a09cd137f1106c7cd0af4f901ab2" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.470895 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lq5ql" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.471281 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.473903 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "6397c86f-66a1-4278-a210-5fe35904b1c3" (UID: "6397c86f-66a1-4278-a210-5fe35904b1c3"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.498389 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d2l99\" (UniqueName: \"kubernetes.io/projected/6397c86f-66a1-4278-a210-5fe35904b1c3-kube-api-access-d2l99\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.498440 4838 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.498452 4838 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.524268 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "6397c86f-66a1-4278-a210-5fe35904b1c3" (UID: "6397c86f-66a1-4278-a210-5fe35904b1c3"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.549715 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lq5ql"] Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.558006 4838 scope.go:117] "RemoveContainer" containerID="d0fa955a9ca356f326a1cd98a9ff26f71d9853776d5e5b5c50516debda927a36" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.561889 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-lq5ql"] Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.569563 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6397c86f-66a1-4278-a210-5fe35904b1c3" (UID: "6397c86f-66a1-4278-a210-5fe35904b1c3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.579728 4838 scope.go:117] "RemoveContainer" containerID="7025f97cc39b88876ab987ec82e0421127527baf447e1c1808d55357535e587a" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.583777 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-config-data" (OuterVolumeSpecName: "config-data") pod "6397c86f-66a1-4278-a210-5fe35904b1c3" (UID: "6397c86f-66a1-4278-a210-5fe35904b1c3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.600597 4838 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.600653 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.600665 4838 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6397c86f-66a1-4278-a210-5fe35904b1c3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.664860 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.665978 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.696874 4838 scope.go:117] "RemoveContainer" containerID="8a170125169f6889d890b414ed6f12a20911947f4575596947989e8143a87112" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.721314 4838 scope.go:117] "RemoveContainer" containerID="643dfe2edc39986ca84bf00f679a75915b46a09cd137f1106c7cd0af4f901ab2" Feb 02 11:20:31 crc kubenswrapper[4838]: E0202 11:20:31.722875 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"643dfe2edc39986ca84bf00f679a75915b46a09cd137f1106c7cd0af4f901ab2\": container with ID starting with 643dfe2edc39986ca84bf00f679a75915b46a09cd137f1106c7cd0af4f901ab2 not found: ID does not exist" 
containerID="643dfe2edc39986ca84bf00f679a75915b46a09cd137f1106c7cd0af4f901ab2" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.722908 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"643dfe2edc39986ca84bf00f679a75915b46a09cd137f1106c7cd0af4f901ab2"} err="failed to get container status \"643dfe2edc39986ca84bf00f679a75915b46a09cd137f1106c7cd0af4f901ab2\": rpc error: code = NotFound desc = could not find container \"643dfe2edc39986ca84bf00f679a75915b46a09cd137f1106c7cd0af4f901ab2\": container with ID starting with 643dfe2edc39986ca84bf00f679a75915b46a09cd137f1106c7cd0af4f901ab2 not found: ID does not exist" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.722932 4838 scope.go:117] "RemoveContainer" containerID="d0fa955a9ca356f326a1cd98a9ff26f71d9853776d5e5b5c50516debda927a36" Feb 02 11:20:31 crc kubenswrapper[4838]: E0202 11:20:31.724141 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0fa955a9ca356f326a1cd98a9ff26f71d9853776d5e5b5c50516debda927a36\": container with ID starting with d0fa955a9ca356f326a1cd98a9ff26f71d9853776d5e5b5c50516debda927a36 not found: ID does not exist" containerID="d0fa955a9ca356f326a1cd98a9ff26f71d9853776d5e5b5c50516debda927a36" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.724170 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0fa955a9ca356f326a1cd98a9ff26f71d9853776d5e5b5c50516debda927a36"} err="failed to get container status \"d0fa955a9ca356f326a1cd98a9ff26f71d9853776d5e5b5c50516debda927a36\": rpc error: code = NotFound desc = could not find container \"d0fa955a9ca356f326a1cd98a9ff26f71d9853776d5e5b5c50516debda927a36\": container with ID starting with d0fa955a9ca356f326a1cd98a9ff26f71d9853776d5e5b5c50516debda927a36 not found: ID does not exist" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.724195 4838 scope.go:117] "RemoveContainer" containerID="7025f97cc39b88876ab987ec82e0421127527baf447e1c1808d55357535e587a" Feb 02 11:20:31 crc kubenswrapper[4838]: E0202 11:20:31.724521 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7025f97cc39b88876ab987ec82e0421127527baf447e1c1808d55357535e587a\": container with ID starting with 7025f97cc39b88876ab987ec82e0421127527baf447e1c1808d55357535e587a not found: ID does not exist" containerID="7025f97cc39b88876ab987ec82e0421127527baf447e1c1808d55357535e587a" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.724541 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7025f97cc39b88876ab987ec82e0421127527baf447e1c1808d55357535e587a"} err="failed to get container status \"7025f97cc39b88876ab987ec82e0421127527baf447e1c1808d55357535e587a\": rpc error: code = NotFound desc = could not find container \"7025f97cc39b88876ab987ec82e0421127527baf447e1c1808d55357535e587a\": container with ID starting with 7025f97cc39b88876ab987ec82e0421127527baf447e1c1808d55357535e587a not found: ID does not exist" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.724579 4838 scope.go:117] "RemoveContainer" containerID="8a170125169f6889d890b414ed6f12a20911947f4575596947989e8143a87112" Feb 02 11:20:31 crc kubenswrapper[4838]: E0202 11:20:31.724852 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"8a170125169f6889d890b414ed6f12a20911947f4575596947989e8143a87112\": container with ID starting with 8a170125169f6889d890b414ed6f12a20911947f4575596947989e8143a87112 not found: ID does not exist" containerID="8a170125169f6889d890b414ed6f12a20911947f4575596947989e8143a87112" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.724872 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a170125169f6889d890b414ed6f12a20911947f4575596947989e8143a87112"} err="failed to get container status \"8a170125169f6889d890b414ed6f12a20911947f4575596947989e8143a87112\": rpc error: code = NotFound desc = could not find container \"8a170125169f6889d890b414ed6f12a20911947f4575596947989e8143a87112\": container with ID starting with 8a170125169f6889d890b414ed6f12a20911947f4575596947989e8143a87112 not found: ID does not exist" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.811714 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.826505 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.859528 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:20:31 crc kubenswrapper[4838]: E0202 11:20:31.860025 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cdce19a-2872-4dea-94d1-6497f104c890" containerName="registry-server" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.860048 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cdce19a-2872-4dea-94d1-6497f104c890" containerName="registry-server" Feb 02 11:20:31 crc kubenswrapper[4838]: E0202 11:20:31.860066 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba3f8bb4-fdbc-4534-b20c-df6f61e4f520" containerName="extract-utilities" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.860075 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba3f8bb4-fdbc-4534-b20c-df6f61e4f520" containerName="extract-utilities" Feb 02 11:20:31 crc kubenswrapper[4838]: E0202 11:20:31.860086 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6397c86f-66a1-4278-a210-5fe35904b1c3" containerName="sg-core" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.860094 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="6397c86f-66a1-4278-a210-5fe35904b1c3" containerName="sg-core" Feb 02 11:20:31 crc kubenswrapper[4838]: E0202 11:20:31.860120 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6397c86f-66a1-4278-a210-5fe35904b1c3" containerName="proxy-httpd" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.860130 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="6397c86f-66a1-4278-a210-5fe35904b1c3" containerName="proxy-httpd" Feb 02 11:20:31 crc kubenswrapper[4838]: E0202 11:20:31.860144 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cdce19a-2872-4dea-94d1-6497f104c890" containerName="extract-utilities" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.860153 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cdce19a-2872-4dea-94d1-6497f104c890" containerName="extract-utilities" Feb 02 11:20:31 crc kubenswrapper[4838]: E0202 11:20:31.860169 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6397c86f-66a1-4278-a210-5fe35904b1c3" containerName="ceilometer-central-agent" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 
11:20:31.860176 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="6397c86f-66a1-4278-a210-5fe35904b1c3" containerName="ceilometer-central-agent" Feb 02 11:20:31 crc kubenswrapper[4838]: E0202 11:20:31.860192 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cdce19a-2872-4dea-94d1-6497f104c890" containerName="extract-content" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.860199 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cdce19a-2872-4dea-94d1-6497f104c890" containerName="extract-content" Feb 02 11:20:31 crc kubenswrapper[4838]: E0202 11:20:31.860222 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba3f8bb4-fdbc-4534-b20c-df6f61e4f520" containerName="registry-server" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.860230 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba3f8bb4-fdbc-4534-b20c-df6f61e4f520" containerName="registry-server" Feb 02 11:20:31 crc kubenswrapper[4838]: E0202 11:20:31.860243 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba3f8bb4-fdbc-4534-b20c-df6f61e4f520" containerName="extract-content" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.860250 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba3f8bb4-fdbc-4534-b20c-df6f61e4f520" containerName="extract-content" Feb 02 11:20:31 crc kubenswrapper[4838]: E0202 11:20:31.860264 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6397c86f-66a1-4278-a210-5fe35904b1c3" containerName="ceilometer-notification-agent" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.860271 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="6397c86f-66a1-4278-a210-5fe35904b1c3" containerName="ceilometer-notification-agent" Feb 02 11:20:31 crc kubenswrapper[4838]: E0202 11:20:31.860281 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c81155e-9881-4ed2-bcec-7035aed80588" containerName="init" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.860289 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c81155e-9881-4ed2-bcec-7035aed80588" containerName="init" Feb 02 11:20:31 crc kubenswrapper[4838]: E0202 11:20:31.860302 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c81155e-9881-4ed2-bcec-7035aed80588" containerName="dnsmasq-dns" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.860309 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c81155e-9881-4ed2-bcec-7035aed80588" containerName="dnsmasq-dns" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.860570 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c81155e-9881-4ed2-bcec-7035aed80588" containerName="dnsmasq-dns" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.860592 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cdce19a-2872-4dea-94d1-6497f104c890" containerName="registry-server" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.860601 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba3f8bb4-fdbc-4534-b20c-df6f61e4f520" containerName="registry-server" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.860660 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="6397c86f-66a1-4278-a210-5fe35904b1c3" containerName="ceilometer-notification-agent" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.860678 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="6397c86f-66a1-4278-a210-5fe35904b1c3" 
containerName="ceilometer-central-agent" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.860692 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="6397c86f-66a1-4278-a210-5fe35904b1c3" containerName="proxy-httpd" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.860702 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="6397c86f-66a1-4278-a210-5fe35904b1c3" containerName="sg-core" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.862752 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.865093 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.869112 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.880291 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.896422 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.924809 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b00c3f84-6034-4c0b-ad6c-52845d6743aa-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b00c3f84-6034-4c0b-ad6c-52845d6743aa\") " pod="openstack/ceilometer-0" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.924897 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7fsx\" (UniqueName: \"kubernetes.io/projected/b00c3f84-6034-4c0b-ad6c-52845d6743aa-kube-api-access-c7fsx\") pod \"ceilometer-0\" (UID: \"b00c3f84-6034-4c0b-ad6c-52845d6743aa\") " pod="openstack/ceilometer-0" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.924946 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b00c3f84-6034-4c0b-ad6c-52845d6743aa-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b00c3f84-6034-4c0b-ad6c-52845d6743aa\") " pod="openstack/ceilometer-0" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.924985 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b00c3f84-6034-4c0b-ad6c-52845d6743aa-log-httpd\") pod \"ceilometer-0\" (UID: \"b00c3f84-6034-4c0b-ad6c-52845d6743aa\") " pod="openstack/ceilometer-0" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.925024 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b00c3f84-6034-4c0b-ad6c-52845d6743aa-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b00c3f84-6034-4c0b-ad6c-52845d6743aa\") " pod="openstack/ceilometer-0" Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.925067 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b00c3f84-6034-4c0b-ad6c-52845d6743aa-config-data\") pod \"ceilometer-0\" (UID: \"b00c3f84-6034-4c0b-ad6c-52845d6743aa\") " pod="openstack/ceilometer-0" Feb 02 11:20:31 crc 
Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.925091 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b00c3f84-6034-4c0b-ad6c-52845d6743aa-run-httpd\") pod \"ceilometer-0\" (UID: \"b00c3f84-6034-4c0b-ad6c-52845d6743aa\") " pod="openstack/ceilometer-0"
Feb 02 11:20:31 crc kubenswrapper[4838]: I0202 11:20:31.925215 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b00c3f84-6034-4c0b-ad6c-52845d6743aa-scripts\") pod \"ceilometer-0\" (UID: \"b00c3f84-6034-4c0b-ad6c-52845d6743aa\") " pod="openstack/ceilometer-0"
Feb 02 11:20:32 crc kubenswrapper[4838]: I0202 11:20:32.027777 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b00c3f84-6034-4c0b-ad6c-52845d6743aa-scripts\") pod \"ceilometer-0\" (UID: \"b00c3f84-6034-4c0b-ad6c-52845d6743aa\") " pod="openstack/ceilometer-0"
Feb 02 11:20:32 crc kubenswrapper[4838]: I0202 11:20:32.028094 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b00c3f84-6034-4c0b-ad6c-52845d6743aa-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b00c3f84-6034-4c0b-ad6c-52845d6743aa\") " pod="openstack/ceilometer-0"
Feb 02 11:20:32 crc kubenswrapper[4838]: I0202 11:20:32.028128 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7fsx\" (UniqueName: \"kubernetes.io/projected/b00c3f84-6034-4c0b-ad6c-52845d6743aa-kube-api-access-c7fsx\") pod \"ceilometer-0\" (UID: \"b00c3f84-6034-4c0b-ad6c-52845d6743aa\") " pod="openstack/ceilometer-0"
Feb 02 11:20:32 crc kubenswrapper[4838]: I0202 11:20:32.028161 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b00c3f84-6034-4c0b-ad6c-52845d6743aa-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b00c3f84-6034-4c0b-ad6c-52845d6743aa\") " pod="openstack/ceilometer-0"
Feb 02 11:20:32 crc kubenswrapper[4838]: I0202 11:20:32.028190 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b00c3f84-6034-4c0b-ad6c-52845d6743aa-log-httpd\") pod \"ceilometer-0\" (UID: \"b00c3f84-6034-4c0b-ad6c-52845d6743aa\") " pod="openstack/ceilometer-0"
Feb 02 11:20:32 crc kubenswrapper[4838]: I0202 11:20:32.028222 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b00c3f84-6034-4c0b-ad6c-52845d6743aa-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b00c3f84-6034-4c0b-ad6c-52845d6743aa\") " pod="openstack/ceilometer-0"
Feb 02 11:20:32 crc kubenswrapper[4838]: I0202 11:20:32.028257 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b00c3f84-6034-4c0b-ad6c-52845d6743aa-config-data\") pod \"ceilometer-0\" (UID: \"b00c3f84-6034-4c0b-ad6c-52845d6743aa\") " pod="openstack/ceilometer-0"
Feb 02 11:20:32 crc kubenswrapper[4838]: I0202 11:20:32.028280 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b00c3f84-6034-4c0b-ad6c-52845d6743aa-run-httpd\") pod \"ceilometer-0\" (UID: \"b00c3f84-6034-4c0b-ad6c-52845d6743aa\") " pod="openstack/ceilometer-0"
Feb 02 11:20:32 crc kubenswrapper[4838]: I0202 11:20:32.029085 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b00c3f84-6034-4c0b-ad6c-52845d6743aa-log-httpd\") pod \"ceilometer-0\" (UID: \"b00c3f84-6034-4c0b-ad6c-52845d6743aa\") " pod="openstack/ceilometer-0"
Feb 02 11:20:32 crc kubenswrapper[4838]: I0202 11:20:32.029144 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b00c3f84-6034-4c0b-ad6c-52845d6743aa-run-httpd\") pod \"ceilometer-0\" (UID: \"b00c3f84-6034-4c0b-ad6c-52845d6743aa\") " pod="openstack/ceilometer-0"
Feb 02 11:20:32 crc kubenswrapper[4838]: I0202 11:20:32.041812 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b00c3f84-6034-4c0b-ad6c-52845d6743aa-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b00c3f84-6034-4c0b-ad6c-52845d6743aa\") " pod="openstack/ceilometer-0"
Feb 02 11:20:32 crc kubenswrapper[4838]: I0202 11:20:32.042436 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b00c3f84-6034-4c0b-ad6c-52845d6743aa-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b00c3f84-6034-4c0b-ad6c-52845d6743aa\") " pod="openstack/ceilometer-0"
Feb 02 11:20:32 crc kubenswrapper[4838]: I0202 11:20:32.049350 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b00c3f84-6034-4c0b-ad6c-52845d6743aa-scripts\") pod \"ceilometer-0\" (UID: \"b00c3f84-6034-4c0b-ad6c-52845d6743aa\") " pod="openstack/ceilometer-0"
Feb 02 11:20:32 crc kubenswrapper[4838]: I0202 11:20:32.051148 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b00c3f84-6034-4c0b-ad6c-52845d6743aa-config-data\") pod \"ceilometer-0\" (UID: \"b00c3f84-6034-4c0b-ad6c-52845d6743aa\") " pod="openstack/ceilometer-0"
Feb 02 11:20:32 crc kubenswrapper[4838]: I0202 11:20:32.062441 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b00c3f84-6034-4c0b-ad6c-52845d6743aa-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b00c3f84-6034-4c0b-ad6c-52845d6743aa\") " pod="openstack/ceilometer-0"
Feb 02 11:20:32 crc kubenswrapper[4838]: I0202 11:20:32.073300 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7fsx\" (UniqueName: \"kubernetes.io/projected/b00c3f84-6034-4c0b-ad6c-52845d6743aa-kube-api-access-c7fsx\") pod \"ceilometer-0\" (UID: \"b00c3f84-6034-4c0b-ad6c-52845d6743aa\") " pod="openstack/ceilometer-0"
Feb 02 11:20:32 crc kubenswrapper[4838]: I0202 11:20:32.182751 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 02 11:20:32 crc kubenswrapper[4838]: I0202 11:20:32.522338 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6397c86f-66a1-4278-a210-5fe35904b1c3" path="/var/lib/kubelet/pods/6397c86f-66a1-4278-a210-5fe35904b1c3/volumes"
Feb 02 11:20:32 crc kubenswrapper[4838]: I0202 11:20:32.523592 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba3f8bb4-fdbc-4534-b20c-df6f61e4f520" path="/var/lib/kubelet/pods/ba3f8bb4-fdbc-4534-b20c-df6f61e4f520/volumes"
Feb 02 11:20:32 crc kubenswrapper[4838]: W0202 11:20:32.651156 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb00c3f84_6034_4c0b_ad6c_52845d6743aa.slice/crio-20298643112fe78379dce8d547c2e6121eed666ba389c29941bdf28b4237ec08 WatchSource:0}: Error finding container 20298643112fe78379dce8d547c2e6121eed666ba389c29941bdf28b4237ec08: Status 404 returned error can't find the container with id 20298643112fe78379dce8d547c2e6121eed666ba389c29941bdf28b4237ec08
Feb 02 11:20:32 crc kubenswrapper[4838]: I0202 11:20:32.651244 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Feb 02 11:20:32 crc kubenswrapper[4838]: I0202 11:20:32.683931 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b6d9b193-d35a-40e7-87da-b20cfaca82b4" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.225:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Feb 02 11:20:32 crc kubenswrapper[4838]: I0202 11:20:32.683965 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b6d9b193-d35a-40e7-87da-b20cfaca82b4" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.225:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Feb 02 11:20:33 crc kubenswrapper[4838]: I0202 11:20:33.491213 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b00c3f84-6034-4c0b-ad6c-52845d6743aa","Type":"ContainerStarted","Data":"20298643112fe78379dce8d547c2e6121eed666ba389c29941bdf28b4237ec08"}
Feb 02 11:20:34 crc kubenswrapper[4838]: I0202 11:20:34.500865 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b00c3f84-6034-4c0b-ad6c-52845d6743aa","Type":"ContainerStarted","Data":"bf5ffc608588216f00467df715a3758daa6c69517cb1121ea22f0064eb66cda1"}
Feb 02 11:20:36 crc kubenswrapper[4838]: I0202 11:20:36.550545 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b00c3f84-6034-4c0b-ad6c-52845d6743aa","Type":"ContainerStarted","Data":"7cd754582d02f1af0118cbf821ff93a7a40e8890f9d045a23ab1e7b311bcd01d"}
Feb 02 11:20:37 crc kubenswrapper[4838]: I0202 11:20:37.565425 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b00c3f84-6034-4c0b-ad6c-52845d6743aa","Type":"ContainerStarted","Data":"5d5004ee25273df918e1055d2b9c529714ee7716384761e5e44cfa5ae32eaa1b"}
Feb 02 11:20:38 crc kubenswrapper[4838]: I0202 11:20:38.801069 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Feb 02 11:20:38 crc kubenswrapper[4838]: I0202 11:20:38.801642 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Feb 02 11:20:38 crc kubenswrapper[4838]: I0202 11:20:38.808375 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Feb 02 11:20:38 crc kubenswrapper[4838]: I0202 11:20:38.809538 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Feb 02 11:20:39 crc kubenswrapper[4838]: I0202 11:20:39.599669 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b00c3f84-6034-4c0b-ad6c-52845d6743aa","Type":"ContainerStarted","Data":"43750ebb6e7888edc7c78c57959918af534dfe61d7a8d343d59cdefbc37d2884"}
Feb 02 11:20:39 crc kubenswrapper[4838]: I0202 11:20:39.600607 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Feb 02 11:20:39 crc kubenswrapper[4838]: I0202 11:20:39.608192 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Feb 02 11:20:39 crc kubenswrapper[4838]: I0202 11:20:39.640706 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.098723685 podStartE2EDuration="8.640682604s" podCreationTimestamp="2026-02-02 11:20:31 +0000 UTC" firstStartedPulling="2026-02-02 11:20:32.654392822 +0000 UTC m=+1626.991493850" lastFinishedPulling="2026-02-02 11:20:39.196351741 +0000 UTC m=+1633.533452769" observedRunningTime="2026-02-02 11:20:39.63002565 +0000 UTC m=+1633.967126708" watchObservedRunningTime="2026-02-02 11:20:39.640682604 +0000 UTC m=+1633.977783632"
Feb 02 11:20:40 crc kubenswrapper[4838]: I0202 11:20:40.609107 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Feb 02 11:20:41 crc kubenswrapper[4838]: I0202 11:20:41.669873 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Feb 02 11:20:41 crc kubenswrapper[4838]: I0202 11:20:41.671228 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Feb 02 11:20:41 crc kubenswrapper[4838]: I0202 11:20:41.677374 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Feb 02 11:20:42 crc kubenswrapper[4838]: I0202 11:20:42.636170 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Feb 02 11:20:45 crc kubenswrapper[4838]: I0202 11:20:45.429794 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 02 11:20:45 crc kubenswrapper[4838]: I0202 11:20:45.430239 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 11:20:45 crc kubenswrapper[4838]: I0202 11:20:45.430305 4838 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv"
Feb 02 11:20:45 crc kubenswrapper[4838]: I0202 11:20:45.431364 4838 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9"} pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 02 11:20:45 crc kubenswrapper[4838]: I0202 11:20:45.431430 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" containerID="cri-o://0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9" gracePeriod=600
Feb 02 11:20:45 crc kubenswrapper[4838]: I0202 11:20:45.655106 4838 generic.go:334] "Generic (PLEG): container finished" podID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9" exitCode=0
Feb 02 11:20:45 crc kubenswrapper[4838]: I0202 11:20:45.655164 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" event={"ID":"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce","Type":"ContainerDied","Data":"0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9"}
Feb 02 11:20:45 crc kubenswrapper[4838]: I0202 11:20:45.655261 4838 scope.go:117] "RemoveContainer" containerID="59dac7f34e4b14b86296ead42a59d6f6e3f3b9fd93372b24781304406104890f"
Feb 02 11:20:45 crc kubenswrapper[4838]: E0202 11:20:45.804303 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce"
Feb 02 11:20:46 crc kubenswrapper[4838]: I0202 11:20:46.667315 4838 scope.go:117] "RemoveContainer" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9"
Feb 02 11:20:46 crc kubenswrapper[4838]: E0202 11:20:46.667849 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce"
Feb 02 11:20:58 crc kubenswrapper[4838]: I0202 11:20:58.505857 4838 scope.go:117] "RemoveContainer" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9"
Feb 02 11:20:58 crc kubenswrapper[4838]: E0202 11:20:58.506739 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce"
Feb 02 11:20:58 crc kubenswrapper[4838]: I0202 11:20:58.776108 4838 generic.go:334] "Generic (PLEG): container finished" podID="3318b8c1-22ca-45c4-a2fd-90205cea5a72" containerID="f25a2284d2dde9d906c1e133341c3751f6f15de2b2d4c39c678b0d2acb325e90" exitCode=0
Feb 02 11:20:58 crc kubenswrapper[4838]: I0202 11:20:58.776156 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-conductor-0" event={"ID":"3318b8c1-22ca-45c4-a2fd-90205cea5a72","Type":"ContainerDied","Data":"f25a2284d2dde9d906c1e133341c3751f6f15de2b2d4c39c678b0d2acb325e90"}
Feb 02 11:20:59 crc kubenswrapper[4838]: I0202 11:20:59.808688 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-conductor-0" event={"ID":"3318b8c1-22ca-45c4-a2fd-90205cea5a72","Type":"ContainerStarted","Data":"8673283a4f79ed376ac2f1636ff45e1733bad9de53d976d6333de4cc23412250"}
Feb 02 11:20:59 crc kubenswrapper[4838]: I0202 11:20:59.809129 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-conductor-0" event={"ID":"3318b8c1-22ca-45c4-a2fd-90205cea5a72","Type":"ContainerStarted","Data":"ee1c01f481821c6dbc2bc38bc8135bd8e60199c3690f6e8240c79140a71190e3"}
Feb 02 11:21:00 crc kubenswrapper[4838]: I0202 11:21:00.825708 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-conductor-0" event={"ID":"3318b8c1-22ca-45c4-a2fd-90205cea5a72","Type":"ContainerStarted","Data":"9e1127954d2c0425ce090f8b020ea55a92cbf66d1d24a9e8e5335593987e5eb0"}
Feb 02 11:21:00 crc kubenswrapper[4838]: I0202 11:21:00.825919 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ironic-conductor-0"
Feb 02 11:21:00 crc kubenswrapper[4838]: I0202 11:21:00.859976 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ironic-conductor-0" podStartSLOduration=134.13016899 podStartE2EDuration="3m0.859956218s" podCreationTimestamp="2026-02-02 11:18:00 +0000 UTC" firstStartedPulling="2026-02-02 11:18:04.455728174 +0000 UTC m=+1478.792829202" lastFinishedPulling="2026-02-02 11:18:51.185515402 +0000 UTC m=+1525.522616430" observedRunningTime="2026-02-02 11:21:00.853777843 +0000 UTC m=+1655.190878881" watchObservedRunningTime="2026-02-02 11:21:00.859956218 +0000 UTC m=+1655.197057246"
Feb 02 11:21:02 crc kubenswrapper[4838]: I0202 11:21:02.199702 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Feb 02 11:21:02 crc kubenswrapper[4838]: I0202 11:21:02.370248 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ironic-conductor-0"
Feb 02 11:21:04 crc kubenswrapper[4838]: I0202 11:21:04.080988 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ironic-conductor-0"
Feb 02 11:21:04 crc kubenswrapper[4838]: I0202 11:21:04.083975 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ironic-conductor-0"
Feb 02 11:21:11 crc kubenswrapper[4838]: I0202 11:21:11.574660 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Feb 02 11:21:12 crc kubenswrapper[4838]: I0202 11:21:12.506425 4838 scope.go:117] "RemoveContainer" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9"
Feb 02 11:21:12 crc kubenswrapper[4838]: E0202 11:21:12.507032 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce"
Feb 02 11:21:12 crc kubenswrapper[4838]: I0202 11:21:12.517591 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Feb 02 11:21:16 crc kubenswrapper[4838]: I0202 11:21:16.279579 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="698d5f5d-683c-4130-8d4f-d1d59b5d32e4" containerName="rabbitmq" containerID="cri-o://5609c1ee9b0d2c9dc9161b9a6e7b9324e75153305cbea82c771065f24a03a445" gracePeriod=604796
Feb 02 11:21:16 crc kubenswrapper[4838]: I0202 11:21:16.942385 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="10f55730-6ea0-4989-a006-b0549f5566a7" containerName="rabbitmq" containerID="cri-o://6a52eb6196a3e58500dc0f8f670fcaaf4bc4f7075c3b6a2c4342f51a72e7519b" gracePeriod=604796
Feb 02 11:21:17 crc kubenswrapper[4838]: I0202 11:21:17.441939 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="698d5f5d-683c-4130-8d4f-d1d59b5d32e4" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.98:5671: connect: connection refused"
Feb 02 11:21:17 crc kubenswrapper[4838]: I0202 11:21:17.771263 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="10f55730-6ea0-4989-a006-b0549f5566a7" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.99:5671: connect: connection refused"
Feb 02 11:21:22 crc kubenswrapper[4838]: I0202 11:21:22.970968 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.017738 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-config-data\") pod \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") "
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.017801 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") "
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.017847 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-rabbitmq-plugins\") pod \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") "
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.017895 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-erlang-cookie-secret\") pod \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") "
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.017921 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-pod-info\") pod \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") "
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.017943 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-rabbitmq-erlang-cookie\") pod \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") "
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.017967 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-rabbitmq-tls\") pod \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") "
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.017999 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bn88t\" (UniqueName: \"kubernetes.io/projected/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-kube-api-access-bn88t\") pod \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") "
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.018082 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-server-conf\") pod \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") "
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.018119 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-rabbitmq-confd\") pod \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") "
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.018192 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-plugins-conf\") pod \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\" (UID: \"698d5f5d-683c-4130-8d4f-d1d59b5d32e4\") "
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.018920 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "698d5f5d-683c-4130-8d4f-d1d59b5d32e4" (UID: "698d5f5d-683c-4130-8d4f-d1d59b5d32e4"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.019255 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "698d5f5d-683c-4130-8d4f-d1d59b5d32e4" (UID: "698d5f5d-683c-4130-8d4f-d1d59b5d32e4"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.022071 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "698d5f5d-683c-4130-8d4f-d1d59b5d32e4" (UID: "698d5f5d-683c-4130-8d4f-d1d59b5d32e4"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.025251 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "698d5f5d-683c-4130-8d4f-d1d59b5d32e4" (UID: "698d5f5d-683c-4130-8d4f-d1d59b5d32e4"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.025852 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-kube-api-access-bn88t" (OuterVolumeSpecName: "kube-api-access-bn88t") pod "698d5f5d-683c-4130-8d4f-d1d59b5d32e4" (UID: "698d5f5d-683c-4130-8d4f-d1d59b5d32e4"). InnerVolumeSpecName "kube-api-access-bn88t". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.027316 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "persistence") pod "698d5f5d-683c-4130-8d4f-d1d59b5d32e4" (UID: "698d5f5d-683c-4130-8d4f-d1d59b5d32e4"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.029595 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-pod-info" (OuterVolumeSpecName: "pod-info") pod "698d5f5d-683c-4130-8d4f-d1d59b5d32e4" (UID: "698d5f5d-683c-4130-8d4f-d1d59b5d32e4"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.047004 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "698d5f5d-683c-4130-8d4f-d1d59b5d32e4" (UID: "698d5f5d-683c-4130-8d4f-d1d59b5d32e4"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.059193 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-config-data" (OuterVolumeSpecName: "config-data") pod "698d5f5d-683c-4130-8d4f-d1d59b5d32e4" (UID: "698d5f5d-683c-4130-8d4f-d1d59b5d32e4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.076719 4838 generic.go:334] "Generic (PLEG): container finished" podID="698d5f5d-683c-4130-8d4f-d1d59b5d32e4" containerID="5609c1ee9b0d2c9dc9161b9a6e7b9324e75153305cbea82c771065f24a03a445" exitCode=0 Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.076766 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"698d5f5d-683c-4130-8d4f-d1d59b5d32e4","Type":"ContainerDied","Data":"5609c1ee9b0d2c9dc9161b9a6e7b9324e75153305cbea82c771065f24a03a445"} Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.076794 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"698d5f5d-683c-4130-8d4f-d1d59b5d32e4","Type":"ContainerDied","Data":"20eef4683ab1fdb6d7b507d1690c2129630d8517a6e47c040711b0e7bf93e35a"} Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.076811 4838 scope.go:117] "RemoveContainer" containerID="5609c1ee9b0d2c9dc9161b9a6e7b9324e75153305cbea82c771065f24a03a445" Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.077027 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.105791 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-server-conf" (OuterVolumeSpecName: "server-conf") pod "698d5f5d-683c-4130-8d4f-d1d59b5d32e4" (UID: "698d5f5d-683c-4130-8d4f-d1d59b5d32e4"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.127983 4838 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-server-conf\") on node \"crc\" DevicePath \"\"" Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.128021 4838 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-plugins-conf\") on node \"crc\" DevicePath \"\"" Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.128031 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.128060 4838 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.128131 4838 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.128145 4838 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.128153 4838 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-pod-info\") on node \"crc\" DevicePath \"\"" 
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.128163 4838 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.128172 4838 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.128183 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bn88t\" (UniqueName: \"kubernetes.io/projected/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-kube-api-access-bn88t\") on node \"crc\" DevicePath \"\""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.153085 4838 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.172646 4838 scope.go:117] "RemoveContainer" containerID="bac490b36b57a0475b02c67e445444a2d8f9bcdd6b9fb515df62f3f92e6d09e1"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.230007 4838 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.237849 4838 scope.go:117] "RemoveContainer" containerID="5609c1ee9b0d2c9dc9161b9a6e7b9324e75153305cbea82c771065f24a03a445"
Feb 02 11:21:23 crc kubenswrapper[4838]: E0202 11:21:23.239506 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5609c1ee9b0d2c9dc9161b9a6e7b9324e75153305cbea82c771065f24a03a445\": container with ID starting with 5609c1ee9b0d2c9dc9161b9a6e7b9324e75153305cbea82c771065f24a03a445 not found: ID does not exist" containerID="5609c1ee9b0d2c9dc9161b9a6e7b9324e75153305cbea82c771065f24a03a445"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.239563 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5609c1ee9b0d2c9dc9161b9a6e7b9324e75153305cbea82c771065f24a03a445"} err="failed to get container status \"5609c1ee9b0d2c9dc9161b9a6e7b9324e75153305cbea82c771065f24a03a445\": rpc error: code = NotFound desc = could not find container \"5609c1ee9b0d2c9dc9161b9a6e7b9324e75153305cbea82c771065f24a03a445\": container with ID starting with 5609c1ee9b0d2c9dc9161b9a6e7b9324e75153305cbea82c771065f24a03a445 not found: ID does not exist"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.239593 4838 scope.go:117] "RemoveContainer" containerID="bac490b36b57a0475b02c67e445444a2d8f9bcdd6b9fb515df62f3f92e6d09e1"
Feb 02 11:21:23 crc kubenswrapper[4838]: E0202 11:21:23.240025 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bac490b36b57a0475b02c67e445444a2d8f9bcdd6b9fb515df62f3f92e6d09e1\": container with ID starting with bac490b36b57a0475b02c67e445444a2d8f9bcdd6b9fb515df62f3f92e6d09e1 not found: ID does not exist" containerID="bac490b36b57a0475b02c67e445444a2d8f9bcdd6b9fb515df62f3f92e6d09e1"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.240066 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bac490b36b57a0475b02c67e445444a2d8f9bcdd6b9fb515df62f3f92e6d09e1"} err="failed to get container status \"bac490b36b57a0475b02c67e445444a2d8f9bcdd6b9fb515df62f3f92e6d09e1\": rpc error: code = NotFound desc = could not find container \"bac490b36b57a0475b02c67e445444a2d8f9bcdd6b9fb515df62f3f92e6d09e1\": container with ID starting with bac490b36b57a0475b02c67e445444a2d8f9bcdd6b9fb515df62f3f92e6d09e1 not found: ID does not exist"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.256899 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "698d5f5d-683c-4130-8d4f-d1d59b5d32e4" (UID: "698d5f5d-683c-4130-8d4f-d1d59b5d32e4"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.332381 4838 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/698d5f5d-683c-4130-8d4f-d1d59b5d32e4-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.447909 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.460765 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.473685 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Feb 02 11:21:23 crc kubenswrapper[4838]: E0202 11:21:23.474167 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="698d5f5d-683c-4130-8d4f-d1d59b5d32e4" containerName="rabbitmq"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.474183 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="698d5f5d-683c-4130-8d4f-d1d59b5d32e4" containerName="rabbitmq"
Feb 02 11:21:23 crc kubenswrapper[4838]: E0202 11:21:23.474214 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="698d5f5d-683c-4130-8d4f-d1d59b5d32e4" containerName="setup-container"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.474221 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="698d5f5d-683c-4130-8d4f-d1d59b5d32e4" containerName="setup-container"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.474402 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="698d5f5d-683c-4130-8d4f-d1d59b5d32e4" containerName="rabbitmq"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.475472 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.480282 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.481508 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.481843 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.482014 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-wr5z9"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.482317 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.483223 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.483460 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.483665 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.507436 4838 scope.go:117] "RemoveContainer" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9"
Feb 02 11:21:23 crc kubenswrapper[4838]: E0202 11:21:23.507968 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.596165 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.657937 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c6516e19-8887-4dda-a635-bc93da2a19a6-config-data\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.657993 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c6516e19-8887-4dda-a635-bc93da2a19a6-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.658261 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c6516e19-8887-4dda-a635-bc93da2a19a6-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.658318 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c6516e19-8887-4dda-a635-bc93da2a19a6-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.658466 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c6516e19-8887-4dda-a635-bc93da2a19a6-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.658500 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c6516e19-8887-4dda-a635-bc93da2a19a6-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.658547 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llkq9\" (UniqueName: \"kubernetes.io/projected/c6516e19-8887-4dda-a635-bc93da2a19a6-kube-api-access-llkq9\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.658564 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c6516e19-8887-4dda-a635-bc93da2a19a6-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.658598 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c6516e19-8887-4dda-a635-bc93da2a19a6-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.658674 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c6516e19-8887-4dda-a635-bc93da2a19a6-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.658748 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.760078 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/10f55730-6ea0-4989-a006-b0549f5566a7-rabbitmq-erlang-cookie\") pod \"10f55730-6ea0-4989-a006-b0549f5566a7\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") "
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.760230 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/10f55730-6ea0-4989-a006-b0549f5566a7-plugins-conf\") pod \"10f55730-6ea0-4989-a006-b0549f5566a7\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") "
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.760304 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/10f55730-6ea0-4989-a006-b0549f5566a7-rabbitmq-plugins\") pod \"10f55730-6ea0-4989-a006-b0549f5566a7\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") "
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.760349 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/10f55730-6ea0-4989-a006-b0549f5566a7-server-conf\") pod \"10f55730-6ea0-4989-a006-b0549f5566a7\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") "
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.760396 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/10f55730-6ea0-4989-a006-b0549f5566a7-config-data\") pod \"10f55730-6ea0-4989-a006-b0549f5566a7\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") "
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.760430 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/10f55730-6ea0-4989-a006-b0549f5566a7-rabbitmq-confd\") pod \"10f55730-6ea0-4989-a006-b0549f5566a7\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") "
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.760477 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/10f55730-6ea0-4989-a006-b0549f5566a7-pod-info\") pod \"10f55730-6ea0-4989-a006-b0549f5566a7\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") "
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.760581 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/10f55730-6ea0-4989-a006-b0549f5566a7-erlang-cookie-secret\") pod \"10f55730-6ea0-4989-a006-b0549f5566a7\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") "
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.760639 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/10f55730-6ea0-4989-a006-b0549f5566a7-rabbitmq-tls\") pod \"10f55730-6ea0-4989-a006-b0549f5566a7\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") "
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.760678 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rmtv6\" (UniqueName: \"kubernetes.io/projected/10f55730-6ea0-4989-a006-b0549f5566a7-kube-api-access-rmtv6\") pod \"10f55730-6ea0-4989-a006-b0549f5566a7\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") "
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.760711 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10f55730-6ea0-4989-a006-b0549f5566a7-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "10f55730-6ea0-4989-a006-b0549f5566a7" (UID: "10f55730-6ea0-4989-a006-b0549f5566a7"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.760737 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"10f55730-6ea0-4989-a006-b0549f5566a7\" (UID: \"10f55730-6ea0-4989-a006-b0549f5566a7\") "
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.761589 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10f55730-6ea0-4989-a006-b0549f5566a7-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "10f55730-6ea0-4989-a006-b0549f5566a7" (UID: "10f55730-6ea0-4989-a006-b0549f5566a7"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.762339 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c6516e19-8887-4dda-a635-bc93da2a19a6-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.762391 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c6516e19-8887-4dda-a635-bc93da2a19a6-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.762460 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llkq9\" (UniqueName: \"kubernetes.io/projected/c6516e19-8887-4dda-a635-bc93da2a19a6-kube-api-access-llkq9\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.762482 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c6516e19-8887-4dda-a635-bc93da2a19a6-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.762524 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c6516e19-8887-4dda-a635-bc93da2a19a6-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.762560 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c6516e19-8887-4dda-a635-bc93da2a19a6-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.762640 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.762711 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c6516e19-8887-4dda-a635-bc93da2a19a6-config-data\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.762738 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c6516e19-8887-4dda-a635-bc93da2a19a6-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.762915 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c6516e19-8887-4dda-a635-bc93da2a19a6-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.762944 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c6516e19-8887-4dda-a635-bc93da2a19a6-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.763016 4838 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/10f55730-6ea0-4989-a006-b0549f5566a7-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.763028 4838 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/10f55730-6ea0-4989-a006-b0549f5566a7-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.764469 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c6516e19-8887-4dda-a635-bc93da2a19a6-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.765552 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c6516e19-8887-4dda-a635-bc93da2a19a6-config-data\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.766907 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c6516e19-8887-4dda-a635-bc93da2a19a6-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.767129 4838 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.767553 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10f55730-6ea0-4989-a006-b0549f5566a7-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "10f55730-6ea0-4989-a006-b0549f5566a7" (UID: "10f55730-6ea0-4989-a006-b0549f5566a7"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.769862 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c6516e19-8887-4dda-a635-bc93da2a19a6-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.770302 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10f55730-6ea0-4989-a006-b0549f5566a7-kube-api-access-rmtv6" (OuterVolumeSpecName: "kube-api-access-rmtv6") pod "10f55730-6ea0-4989-a006-b0549f5566a7" (UID: "10f55730-6ea0-4989-a006-b0549f5566a7"). InnerVolumeSpecName "kube-api-access-rmtv6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.770376 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/10f55730-6ea0-4989-a006-b0549f5566a7-pod-info" (OuterVolumeSpecName: "pod-info") pod "10f55730-6ea0-4989-a006-b0549f5566a7" (UID: "10f55730-6ea0-4989-a006-b0549f5566a7"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.770432 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10f55730-6ea0-4989-a006-b0549f5566a7-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "10f55730-6ea0-4989-a006-b0549f5566a7" (UID: "10f55730-6ea0-4989-a006-b0549f5566a7"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.771129 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c6516e19-8887-4dda-a635-bc93da2a19a6-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.771147 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c6516e19-8887-4dda-a635-bc93da2a19a6-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.771504 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10f55730-6ea0-4989-a006-b0549f5566a7-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "10f55730-6ea0-4989-a006-b0549f5566a7" (UID: "10f55730-6ea0-4989-a006-b0549f5566a7"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.771779 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c6516e19-8887-4dda-a635-bc93da2a19a6-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.771782 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "persistence") pod "10f55730-6ea0-4989-a006-b0549f5566a7" (UID: "10f55730-6ea0-4989-a006-b0549f5566a7"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.773815 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c6516e19-8887-4dda-a635-bc93da2a19a6-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.780983 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c6516e19-8887-4dda-a635-bc93da2a19a6-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.788282 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llkq9\" (UniqueName: \"kubernetes.io/projected/c6516e19-8887-4dda-a635-bc93da2a19a6-kube-api-access-llkq9\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.796384 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10f55730-6ea0-4989-a006-b0549f5566a7-config-data" (OuterVolumeSpecName: "config-data") pod "10f55730-6ea0-4989-a006-b0549f5566a7" (UID: "10f55730-6ea0-4989-a006-b0549f5566a7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.818042 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"c6516e19-8887-4dda-a635-bc93da2a19a6\") " pod="openstack/rabbitmq-server-0"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.846777 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10f55730-6ea0-4989-a006-b0549f5566a7-server-conf" (OuterVolumeSpecName: "server-conf") pod "10f55730-6ea0-4989-a006-b0549f5566a7" (UID: "10f55730-6ea0-4989-a006-b0549f5566a7"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.868526 4838 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/10f55730-6ea0-4989-a006-b0549f5566a7-plugins-conf\") on node \"crc\" DevicePath \"\""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.868577 4838 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/10f55730-6ea0-4989-a006-b0549f5566a7-server-conf\") on node \"crc\" DevicePath \"\""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.868589 4838 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/10f55730-6ea0-4989-a006-b0549f5566a7-config-data\") on node \"crc\" DevicePath \"\""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.868600 4838 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/10f55730-6ea0-4989-a006-b0549f5566a7-pod-info\") on node \"crc\" DevicePath \"\""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.868642 4838 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/10f55730-6ea0-4989-a006-b0549f5566a7-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.868657 4838 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/10f55730-6ea0-4989-a006-b0549f5566a7-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.868670 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rmtv6\" (UniqueName: \"kubernetes.io/projected/10f55730-6ea0-4989-a006-b0549f5566a7-kube-api-access-rmtv6\") on node \"crc\" DevicePath \"\""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.868729 4838 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" "
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.897037 4838 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc"
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.903815 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10f55730-6ea0-4989-a006-b0549f5566a7-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "10f55730-6ea0-4989-a006-b0549f5566a7" (UID: "10f55730-6ea0-4989-a006-b0549f5566a7"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.971886 4838 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\""
Feb 02 11:21:23 crc kubenswrapper[4838]: I0202 11:21:23.971954 4838 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/10f55730-6ea0-4989-a006-b0549f5566a7-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.087865 4838 generic.go:334] "Generic (PLEG): container finished" podID="10f55730-6ea0-4989-a006-b0549f5566a7" containerID="6a52eb6196a3e58500dc0f8f670fcaaf4bc4f7075c3b6a2c4342f51a72e7519b" exitCode=0
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.087927 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"10f55730-6ea0-4989-a006-b0549f5566a7","Type":"ContainerDied","Data":"6a52eb6196a3e58500dc0f8f670fcaaf4bc4f7075c3b6a2c4342f51a72e7519b"}
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.087954 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"10f55730-6ea0-4989-a006-b0549f5566a7","Type":"ContainerDied","Data":"7626129137e1c6892f1a74181697a36e8ac860b4ad5264ef50f6178d6b185a66"}
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.087976 4838 scope.go:117] "RemoveContainer" containerID="6a52eb6196a3e58500dc0f8f670fcaaf4bc4f7075c3b6a2c4342f51a72e7519b"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.088127 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.110572 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.124787 4838 scope.go:117] "RemoveContainer" containerID="70f80cc3e845e9e6389966fa131ac34e28094b7a7010f3ba63b7ea5f71fcac55"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.150668 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.174974 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.179425 4838 scope.go:117] "RemoveContainer" containerID="6a52eb6196a3e58500dc0f8f670fcaaf4bc4f7075c3b6a2c4342f51a72e7519b"
Feb 02 11:21:24 crc kubenswrapper[4838]: E0202 11:21:24.182069 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a52eb6196a3e58500dc0f8f670fcaaf4bc4f7075c3b6a2c4342f51a72e7519b\": container with ID starting with 6a52eb6196a3e58500dc0f8f670fcaaf4bc4f7075c3b6a2c4342f51a72e7519b not found: ID does not exist" containerID="6a52eb6196a3e58500dc0f8f670fcaaf4bc4f7075c3b6a2c4342f51a72e7519b"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.182261 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a52eb6196a3e58500dc0f8f670fcaaf4bc4f7075c3b6a2c4342f51a72e7519b"} err="failed to get container status \"6a52eb6196a3e58500dc0f8f670fcaaf4bc4f7075c3b6a2c4342f51a72e7519b\": rpc error: code = NotFound desc = could not find container \"6a52eb6196a3e58500dc0f8f670fcaaf4bc4f7075c3b6a2c4342f51a72e7519b\": container with ID starting with 6a52eb6196a3e58500dc0f8f670fcaaf4bc4f7075c3b6a2c4342f51a72e7519b not found: ID does not exist"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.182314 4838 scope.go:117] "RemoveContainer" containerID="70f80cc3e845e9e6389966fa131ac34e28094b7a7010f3ba63b7ea5f71fcac55"
Feb 02 11:21:24 crc kubenswrapper[4838]: E0202 11:21:24.182910 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70f80cc3e845e9e6389966fa131ac34e28094b7a7010f3ba63b7ea5f71fcac55\": container with ID starting with 70f80cc3e845e9e6389966fa131ac34e28094b7a7010f3ba63b7ea5f71fcac55 not found: ID does not exist" containerID="70f80cc3e845e9e6389966fa131ac34e28094b7a7010f3ba63b7ea5f71fcac55"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.182962 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70f80cc3e845e9e6389966fa131ac34e28094b7a7010f3ba63b7ea5f71fcac55"} err="failed to get container status \"70f80cc3e845e9e6389966fa131ac34e28094b7a7010f3ba63b7ea5f71fcac55\": rpc error: code = NotFound desc = could not find container \"70f80cc3e845e9e6389966fa131ac34e28094b7a7010f3ba63b7ea5f71fcac55\": container with ID starting with 70f80cc3e845e9e6389966fa131ac34e28094b7a7010f3ba63b7ea5f71fcac55 not found: ID does not exist"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.201850 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Feb 02 11:21:24 crc kubenswrapper[4838]: E0202 11:21:24.204048 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10f55730-6ea0-4989-a006-b0549f5566a7" containerName="rabbitmq"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.204084 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="10f55730-6ea0-4989-a006-b0549f5566a7" containerName="rabbitmq"
Feb 02 11:21:24 crc kubenswrapper[4838]: E0202 11:21:24.204104 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10f55730-6ea0-4989-a006-b0549f5566a7" containerName="setup-container"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.204115 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="10f55730-6ea0-4989-a006-b0549f5566a7" containerName="setup-container"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.204535 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="10f55730-6ea0-4989-a006-b0549f5566a7" containerName="rabbitmq"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.206518 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.209782 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.210113 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.210257 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-25g28"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.210395 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.211316 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.211560 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.211821 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.215592 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.378421 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.378661 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.378677 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.378717 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2tdmf\" (UniqueName: \"kubernetes.io/projected/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-kube-api-access-2tdmf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.378743 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.378783 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.378807 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.378825 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.378867 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.378906 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.378929 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.480290 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.480360 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.480396 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.480425 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.480447 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.480502 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2tdmf\" (UniqueName: \"kubernetes.io/projected/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-kube-api-access-2tdmf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.480538 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.480611 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.480660 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.480683 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.480740 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.480869 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.481171 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.481421 4838 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.482260 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.482274 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.483895 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.486966 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.487333 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.492189 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.494978 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.506325 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2tdmf\" (UniqueName: \"kubernetes.io/projected/a21f4822-c0c1-4b73-bad3-ddf3552c9ebd-kube-api-access-2tdmf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.523174 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd\") " pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.528079 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.531953 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10f55730-6ea0-4989-a006-b0549f5566a7" path="/var/lib/kubelet/pods/10f55730-6ea0-4989-a006-b0549f5566a7/volumes"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.532870 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="698d5f5d-683c-4130-8d4f-d1d59b5d32e4" path="/var/lib/kubelet/pods/698d5f5d-683c-4130-8d4f-d1d59b5d32e4/volumes"
Feb 02 11:21:24 crc kubenswrapper[4838]: I0202 11:21:24.664761 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Feb 02 11:21:24 crc kubenswrapper[4838]: W0202 11:21:24.684906 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc6516e19_8887_4dda_a635_bc93da2a19a6.slice/crio-556b7f00cdf545c4b7c0d9c486c4d0871b15dbf10bf5366f631e4ae151c02830 WatchSource:0}: Error finding container 556b7f00cdf545c4b7c0d9c486c4d0871b15dbf10bf5366f631e4ae151c02830: Status 404 returned error can't find the container with id 556b7f00cdf545c4b7c0d9c486c4d0871b15dbf10bf5366f631e4ae151c02830
Feb 02 11:21:25 crc kubenswrapper[4838]: W0202 11:21:25.004599 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda21f4822_c0c1_4b73_bad3_ddf3552c9ebd.slice/crio-f9ef10350913e8d3708453760f57c534b962f561451eea86ba305f26e9a04dce WatchSource:0}: Error finding container f9ef10350913e8d3708453760f57c534b962f561451eea86ba305f26e9a04dce: Status 404 returned error can't find the container with id f9ef10350913e8d3708453760f57c534b962f561451eea86ba305f26e9a04dce
Feb 02 11:21:25 crc kubenswrapper[4838]: I0202 11:21:25.005891 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Feb 02 11:21:25 crc kubenswrapper[4838]: I0202 11:21:25.113029 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c6516e19-8887-4dda-a635-bc93da2a19a6","Type":"ContainerStarted","Data":"556b7f00cdf545c4b7c0d9c486c4d0871b15dbf10bf5366f631e4ae151c02830"}
Feb 02 11:21:25 crc kubenswrapper[4838]: I0202 11:21:25.115111 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0"
event={"ID":"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd","Type":"ContainerStarted","Data":"f9ef10350913e8d3708453760f57c534b962f561451eea86ba305f26e9a04dce"} Feb 02 11:21:27 crc kubenswrapper[4838]: I0202 11:21:27.135869 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c6516e19-8887-4dda-a635-bc93da2a19a6","Type":"ContainerStarted","Data":"9a9e4e5535b225565767c144f0446282289227b2b4a25834d408b70c1c9c57d9"} Feb 02 11:21:27 crc kubenswrapper[4838]: I0202 11:21:27.138250 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd","Type":"ContainerStarted","Data":"8dd9f5aa84b498c8d8889be82c0a383626f211b54743a5b1c64311a93fb6b062"} Feb 02 11:21:36 crc kubenswrapper[4838]: I0202 11:21:36.046146 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-94f8-account-create-update-fcsh8"] Feb 02 11:21:36 crc kubenswrapper[4838]: I0202 11:21:36.056463 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-94f8-account-create-update-fcsh8"] Feb 02 11:21:36 crc kubenswrapper[4838]: I0202 11:21:36.513486 4838 scope.go:117] "RemoveContainer" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9" Feb 02 11:21:36 crc kubenswrapper[4838]: E0202 11:21:36.513973 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:21:36 crc kubenswrapper[4838]: I0202 11:21:36.516211 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10c3ebde-9586-4021-8295-261f4305e897" path="/var/lib/kubelet/pods/10c3ebde-9586-4021-8295-261f4305e897/volumes" Feb 02 11:21:37 crc kubenswrapper[4838]: I0202 11:21:37.047845 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-sz8sh"] Feb 02 11:21:37 crc kubenswrapper[4838]: I0202 11:21:37.060891 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-8c85-account-create-update-zthcx"] Feb 02 11:21:37 crc kubenswrapper[4838]: I0202 11:21:37.071335 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-dc9m7"] Feb 02 11:21:37 crc kubenswrapper[4838]: I0202 11:21:37.079868 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-8c85-account-create-update-zthcx"] Feb 02 11:21:37 crc kubenswrapper[4838]: I0202 11:21:37.089061 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-sz8sh"] Feb 02 11:21:37 crc kubenswrapper[4838]: I0202 11:21:37.097784 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-dc9m7"] Feb 02 11:21:38 crc kubenswrapper[4838]: I0202 11:21:38.516535 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3eb955c8-5244-437a-976d-4fb19d5177b2" path="/var/lib/kubelet/pods/3eb955c8-5244-437a-976d-4fb19d5177b2/volumes" Feb 02 11:21:38 crc kubenswrapper[4838]: I0202 11:21:38.517397 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc" path="/var/lib/kubelet/pods/6fe795b0-4a13-4de2-ae9e-67dfdf6ee5dc/volumes" Feb 02 
11:21:38 crc kubenswrapper[4838]: I0202 11:21:38.518066 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fce5584a-406e-424c-af9b-bc1d37d0355c" path="/var/lib/kubelet/pods/fce5584a-406e-424c-af9b-bc1d37d0355c/volumes" Feb 02 11:21:40 crc kubenswrapper[4838]: I0202 11:21:40.030784 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-vl4tb"] Feb 02 11:21:40 crc kubenswrapper[4838]: I0202 11:21:40.041533 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-5e8e-account-create-update-55tw9"] Feb 02 11:21:40 crc kubenswrapper[4838]: I0202 11:21:40.049958 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-vl4tb"] Feb 02 11:21:40 crc kubenswrapper[4838]: I0202 11:21:40.073970 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-5e8e-account-create-update-55tw9"] Feb 02 11:21:40 crc kubenswrapper[4838]: I0202 11:21:40.519888 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1eed330b-d64c-42c0-a440-736df7d0d861" path="/var/lib/kubelet/pods/1eed330b-d64c-42c0-a440-736df7d0d861/volumes" Feb 02 11:21:40 crc kubenswrapper[4838]: I0202 11:21:40.520783 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c723ca1-2908-4fb1-820a-c440fbf6616c" path="/var/lib/kubelet/pods/3c723ca1-2908-4fb1-820a-c440fbf6616c/volumes" Feb 02 11:21:46 crc kubenswrapper[4838]: I0202 11:21:46.673183 4838 scope.go:117] "RemoveContainer" containerID="3dd7551e32793babb9d09a22aba1f3fe39d3a9f1b36d4d9d2acd43ed93eaa54f" Feb 02 11:21:46 crc kubenswrapper[4838]: I0202 11:21:46.696868 4838 scope.go:117] "RemoveContainer" containerID="b249dd2e7093a34f01285dc32cebbccc3bb5e23b8b74ddb1aa56b79ccd260288" Feb 02 11:21:46 crc kubenswrapper[4838]: I0202 11:21:46.758943 4838 scope.go:117] "RemoveContainer" containerID="811fa5ec02e5af3a07eed85a8bf177fec1894f8fc75301b3a3a5ef3e9f5a0002" Feb 02 11:21:46 crc kubenswrapper[4838]: I0202 11:21:46.795016 4838 scope.go:117] "RemoveContainer" containerID="a78ef2ce03949be516acb257c362be842f477d8e0584df55fdc99a9455daf185" Feb 02 11:21:46 crc kubenswrapper[4838]: I0202 11:21:46.837085 4838 scope.go:117] "RemoveContainer" containerID="8bc3328f33c16630f050f784999fade725650a89298eb28f8fb690df5e4cdfb6" Feb 02 11:21:46 crc kubenswrapper[4838]: I0202 11:21:46.886901 4838 scope.go:117] "RemoveContainer" containerID="afb6f33e8b1bcc250864e5db117f04acf89cad03722c422bbe6e36d83867aac2" Feb 02 11:21:49 crc kubenswrapper[4838]: I0202 11:21:49.507035 4838 scope.go:117] "RemoveContainer" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9" Feb 02 11:21:49 crc kubenswrapper[4838]: E0202 11:21:49.508108 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:21:59 crc kubenswrapper[4838]: I0202 11:21:59.451372 4838 generic.go:334] "Generic (PLEG): container finished" podID="a21f4822-c0c1-4b73-bad3-ddf3552c9ebd" containerID="8dd9f5aa84b498c8d8889be82c0a383626f211b54743a5b1c64311a93fb6b062" exitCode=0 Feb 02 11:21:59 crc kubenswrapper[4838]: I0202 11:21:59.451457 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd","Type":"ContainerDied","Data":"8dd9f5aa84b498c8d8889be82c0a383626f211b54743a5b1c64311a93fb6b062"} Feb 02 11:21:59 crc kubenswrapper[4838]: I0202 11:21:59.456797 4838 generic.go:334] "Generic (PLEG): container finished" podID="c6516e19-8887-4dda-a635-bc93da2a19a6" containerID="9a9e4e5535b225565767c144f0446282289227b2b4a25834d408b70c1c9c57d9" exitCode=0 Feb 02 11:21:59 crc kubenswrapper[4838]: I0202 11:21:59.456850 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c6516e19-8887-4dda-a635-bc93da2a19a6","Type":"ContainerDied","Data":"9a9e4e5535b225565767c144f0446282289227b2b4a25834d408b70c1c9c57d9"} Feb 02 11:22:00 crc kubenswrapper[4838]: I0202 11:22:00.470009 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c6516e19-8887-4dda-a635-bc93da2a19a6","Type":"ContainerStarted","Data":"a7fc9b9700278b4b144c34a7d4d1aff4742478582e202c9351b89fb67d25331c"} Feb 02 11:22:00 crc kubenswrapper[4838]: I0202 11:22:00.470498 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Feb 02 11:22:00 crc kubenswrapper[4838]: I0202 11:22:00.473644 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a21f4822-c0c1-4b73-bad3-ddf3552c9ebd","Type":"ContainerStarted","Data":"eb90a783a1f7aaec3ccc1c783ddab1682f12f3142f9ba4925116c2610338940d"} Feb 02 11:22:00 crc kubenswrapper[4838]: I0202 11:22:00.473915 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:22:00 crc kubenswrapper[4838]: I0202 11:22:00.501360 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.501341166 podStartE2EDuration="37.501341166s" podCreationTimestamp="2026-02-02 11:21:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:22:00.493270209 +0000 UTC m=+1714.830371257" watchObservedRunningTime="2026-02-02 11:22:00.501341166 +0000 UTC m=+1714.838442194" Feb 02 11:22:00 crc kubenswrapper[4838]: I0202 11:22:00.507849 4838 scope.go:117] "RemoveContainer" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9" Feb 02 11:22:00 crc kubenswrapper[4838]: E0202 11:22:00.508130 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:22:00 crc kubenswrapper[4838]: I0202 11:22:00.531909 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.531890756 podStartE2EDuration="36.531890756s" podCreationTimestamp="2026-02-02 11:21:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:22:00.524775585 +0000 UTC m=+1714.861876643" watchObservedRunningTime="2026-02-02 11:22:00.531890756 +0000 UTC m=+1714.868991784" Feb 02 11:22:12 crc kubenswrapper[4838]: I0202 
11:22:12.506764 4838 scope.go:117] "RemoveContainer" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9" Feb 02 11:22:12 crc kubenswrapper[4838]: E0202 11:22:12.508007 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:22:14 crc kubenswrapper[4838]: I0202 11:22:14.114890 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Feb 02 11:22:14 crc kubenswrapper[4838]: I0202 11:22:14.532799 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Feb 02 11:22:20 crc kubenswrapper[4838]: I0202 11:22:20.049170 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-9fh77"] Feb 02 11:22:20 crc kubenswrapper[4838]: I0202 11:22:20.063048 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-9fh77"] Feb 02 11:22:20 crc kubenswrapper[4838]: I0202 11:22:20.521836 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2" path="/var/lib/kubelet/pods/145b0b8b-9fcb-4ed3-b4e1-6d6d06b3ebf2/volumes" Feb 02 11:22:24 crc kubenswrapper[4838]: I0202 11:22:24.032557 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-9eef-account-create-update-ldszm"] Feb 02 11:22:24 crc kubenswrapper[4838]: I0202 11:22:24.042016 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-ckfdv"] Feb 02 11:22:24 crc kubenswrapper[4838]: I0202 11:22:24.053565 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-6wr5s"] Feb 02 11:22:24 crc kubenswrapper[4838]: I0202 11:22:24.062049 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-ckfdv"] Feb 02 11:22:24 crc kubenswrapper[4838]: I0202 11:22:24.070382 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-6wr5s"] Feb 02 11:22:24 crc kubenswrapper[4838]: I0202 11:22:24.088033 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-9eef-account-create-update-ldszm"] Feb 02 11:22:24 crc kubenswrapper[4838]: I0202 11:22:24.507949 4838 scope.go:117] "RemoveContainer" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9" Feb 02 11:22:24 crc kubenswrapper[4838]: E0202 11:22:24.508413 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:22:24 crc kubenswrapper[4838]: I0202 11:22:24.522544 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69673190-e2b6-4bdc-b00c-0cef9815317b" path="/var/lib/kubelet/pods/69673190-e2b6-4bdc-b00c-0cef9815317b/volumes" Feb 02 11:22:24 crc kubenswrapper[4838]: I0202 11:22:24.523435 4838 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8e20559-4e85-40ad-a733-41497a1772d1" path="/var/lib/kubelet/pods/c8e20559-4e85-40ad-a733-41497a1772d1/volumes" Feb 02 11:22:24 crc kubenswrapper[4838]: I0202 11:22:24.524380 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e57ac670-eea3-4857-8990-872ce1dba0e4" path="/var/lib/kubelet/pods/e57ac670-eea3-4857-8990-872ce1dba0e4/volumes" Feb 02 11:22:25 crc kubenswrapper[4838]: I0202 11:22:25.032925 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-8fe7-account-create-update-zd7vb"] Feb 02 11:22:25 crc kubenswrapper[4838]: I0202 11:22:25.045452 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-4bd92"] Feb 02 11:22:25 crc kubenswrapper[4838]: I0202 11:22:25.054183 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-4bd92"] Feb 02 11:22:25 crc kubenswrapper[4838]: I0202 11:22:25.062361 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-8fe7-account-create-update-zd7vb"] Feb 02 11:22:26 crc kubenswrapper[4838]: I0202 11:22:26.033919 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-d3b7-account-create-update-lkzdb"] Feb 02 11:22:26 crc kubenswrapper[4838]: I0202 11:22:26.044125 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-d3b7-account-create-update-lkzdb"] Feb 02 11:22:26 crc kubenswrapper[4838]: I0202 11:22:26.520162 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fdc9ecb-a604-4618-89a6-fd5a2237f10c" path="/var/lib/kubelet/pods/6fdc9ecb-a604-4618-89a6-fd5a2237f10c/volumes" Feb 02 11:22:26 crc kubenswrapper[4838]: I0202 11:22:26.520774 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d17b5d07-41bc-4ab5-b14d-c36ed19c5098" path="/var/lib/kubelet/pods/d17b5d07-41bc-4ab5-b14d-c36ed19c5098/volumes" Feb 02 11:22:26 crc kubenswrapper[4838]: I0202 11:22:26.521336 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4277c56-4293-4fd6-90e0-ddca7529ef0e" path="/var/lib/kubelet/pods/f4277c56-4293-4fd6-90e0-ddca7529ef0e/volumes" Feb 02 11:22:37 crc kubenswrapper[4838]: I0202 11:22:37.506187 4838 scope.go:117] "RemoveContainer" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9" Feb 02 11:22:37 crc kubenswrapper[4838]: E0202 11:22:37.507412 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:22:44 crc kubenswrapper[4838]: I0202 11:22:44.056832 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-cckg8"] Feb 02 11:22:44 crc kubenswrapper[4838]: I0202 11:22:44.066390 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-cckg8"] Feb 02 11:22:44 crc kubenswrapper[4838]: I0202 11:22:44.519765 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cea963bb-084e-4ba7-98b6-342e509be13f" path="/var/lib/kubelet/pods/cea963bb-084e-4ba7-98b6-342e509be13f/volumes" Feb 02 11:22:47 crc kubenswrapper[4838]: I0202 11:22:47.084999 4838 scope.go:117] "RemoveContainer" 
containerID="b86bfb431043af4342c1698232e0b1d88f1ad477003300c1587d851498159b51" Feb 02 11:22:47 crc kubenswrapper[4838]: I0202 11:22:47.109972 4838 scope.go:117] "RemoveContainer" containerID="0fbd794241f035aa14cb346f7df5823e22e87535dbf87ceb79e0b384590e4ce0" Feb 02 11:22:47 crc kubenswrapper[4838]: I0202 11:22:47.157355 4838 scope.go:117] "RemoveContainer" containerID="748342545ab7293a783e976545de062708d67a3d1fb017dcc3b8f125a247606e" Feb 02 11:22:47 crc kubenswrapper[4838]: I0202 11:22:47.218372 4838 scope.go:117] "RemoveContainer" containerID="769549457e214173337a68edd07eaed24af9846d43d4dab4959aadc5a25eb0b4" Feb 02 11:22:47 crc kubenswrapper[4838]: I0202 11:22:47.256244 4838 scope.go:117] "RemoveContainer" containerID="31f19b37edef2c2273ffe39f037f3687877b04c191b724412ab05a977a8a1412" Feb 02 11:22:47 crc kubenswrapper[4838]: I0202 11:22:47.316535 4838 scope.go:117] "RemoveContainer" containerID="286d16d8368715d8e2a87addffe858bca8f702c15fe53b22bcaab13965c1e2ae" Feb 02 11:22:47 crc kubenswrapper[4838]: I0202 11:22:47.362376 4838 scope.go:117] "RemoveContainer" containerID="c3aa15aa0daa6cd898bb97889a0838e9eefafd05b6e970db29db513eccdcbf64" Feb 02 11:22:47 crc kubenswrapper[4838]: I0202 11:22:47.386891 4838 scope.go:117] "RemoveContainer" containerID="7d5bb5d9de96729f138eb26c32c40200d811eef9cd8af9f50521718a8019cf36" Feb 02 11:22:47 crc kubenswrapper[4838]: I0202 11:22:47.410707 4838 scope.go:117] "RemoveContainer" containerID="16dc7743af77b3d501c26563d4ecfb9bc1955ce5c7b64bd905ac9ef9d33242d6" Feb 02 11:22:47 crc kubenswrapper[4838]: I0202 11:22:47.428933 4838 scope.go:117] "RemoveContainer" containerID="b50c9e2d52d1edde38b5fb4e522cf93e8959beffa6a892de7628550268a1ce0b" Feb 02 11:22:47 crc kubenswrapper[4838]: I0202 11:22:47.454240 4838 scope.go:117] "RemoveContainer" containerID="6f13a866bdd5fed5042948bb20c125fd7fd68dbd4011c30311f9e3998c7d4a8a" Feb 02 11:22:47 crc kubenswrapper[4838]: I0202 11:22:47.479145 4838 scope.go:117] "RemoveContainer" containerID="20015e6144fcc4eb75dfc9ff448234448937717bffbe119fe6d0fc467448c1eb" Feb 02 11:22:47 crc kubenswrapper[4838]: I0202 11:22:47.508826 4838 scope.go:117] "RemoveContainer" containerID="e869a9edd2974a471ccbde9676d4bca1acd45020dc23e073eab3aa52708946c6" Feb 02 11:22:52 crc kubenswrapper[4838]: I0202 11:22:52.505925 4838 scope.go:117] "RemoveContainer" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9" Feb 02 11:22:52 crc kubenswrapper[4838]: E0202 11:22:52.506715 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:23:04 crc kubenswrapper[4838]: I0202 11:23:04.036906 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ironic-db-create-lgk8x"] Feb 02 11:23:04 crc kubenswrapper[4838]: I0202 11:23:04.048781 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ironic-db-create-lgk8x"] Feb 02 11:23:04 crc kubenswrapper[4838]: I0202 11:23:04.518152 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="989df2c8-4c47-4998-a79d-967c08df69fb" path="/var/lib/kubelet/pods/989df2c8-4c47-4998-a79d-967c08df69fb/volumes" Feb 02 11:23:07 crc kubenswrapper[4838]: I0202 11:23:07.506401 4838 scope.go:117] 
"RemoveContainer" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9" Feb 02 11:23:07 crc kubenswrapper[4838]: E0202 11:23:07.509311 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:23:18 crc kubenswrapper[4838]: I0202 11:23:18.506570 4838 scope.go:117] "RemoveContainer" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9" Feb 02 11:23:18 crc kubenswrapper[4838]: E0202 11:23:18.507474 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:23:20 crc kubenswrapper[4838]: I0202 11:23:20.040026 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ironic-8dc3-account-create-update-9m9lm"] Feb 02 11:23:20 crc kubenswrapper[4838]: I0202 11:23:20.062047 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ironic-8dc3-account-create-update-9m9lm"] Feb 02 11:23:20 crc kubenswrapper[4838]: I0202 11:23:20.522414 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d92acdcc-36a4-41c7-bf03-f60966090662" path="/var/lib/kubelet/pods/d92acdcc-36a4-41c7-bf03-f60966090662/volumes" Feb 02 11:23:32 crc kubenswrapper[4838]: I0202 11:23:32.510093 4838 scope.go:117] "RemoveContainer" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9" Feb 02 11:23:32 crc kubenswrapper[4838]: E0202 11:23:32.510995 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:23:47 crc kubenswrapper[4838]: I0202 11:23:47.506120 4838 scope.go:117] "RemoveContainer" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9" Feb 02 11:23:47 crc kubenswrapper[4838]: E0202 11:23:47.507128 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:23:47 crc kubenswrapper[4838]: I0202 11:23:47.729735 4838 scope.go:117] "RemoveContainer" containerID="dd4e75cf1a445ce358dd00abea8be2a1f931d55aa1b063cd59b261aced1c7f73" Feb 02 11:23:47 crc kubenswrapper[4838]: I0202 11:23:47.797824 4838 scope.go:117] "RemoveContainer" 
containerID="f5df467f9b73675d5db53bb9ebb98ad3fa8454a7cff6f58b3e4f4ec825a5b01c" Feb 02 11:23:52 crc kubenswrapper[4838]: I0202 11:23:52.063887 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-cvhqc"] Feb 02 11:23:52 crc kubenswrapper[4838]: I0202 11:23:52.073649 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-cvhqc"] Feb 02 11:23:52 crc kubenswrapper[4838]: I0202 11:23:52.518170 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c087507-7f44-4ead-b6e0-622152cb2eaf" path="/var/lib/kubelet/pods/1c087507-7f44-4ead-b6e0-622152cb2eaf/volumes" Feb 02 11:23:56 crc kubenswrapper[4838]: I0202 11:23:56.034636 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-snvfg"] Feb 02 11:23:56 crc kubenswrapper[4838]: I0202 11:23:56.045571 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-snvfg"] Feb 02 11:23:56 crc kubenswrapper[4838]: I0202 11:23:56.527897 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54de1caa-888f-433a-be5e-87b93932abc2" path="/var/lib/kubelet/pods/54de1caa-888f-433a-be5e-87b93932abc2/volumes" Feb 02 11:24:01 crc kubenswrapper[4838]: I0202 11:24:01.505964 4838 scope.go:117] "RemoveContainer" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9" Feb 02 11:24:01 crc kubenswrapper[4838]: E0202 11:24:01.506825 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:24:13 crc kubenswrapper[4838]: I0202 11:24:13.506390 4838 scope.go:117] "RemoveContainer" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9" Feb 02 11:24:13 crc kubenswrapper[4838]: E0202 11:24:13.507667 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:24:27 crc kubenswrapper[4838]: I0202 11:24:27.505918 4838 scope.go:117] "RemoveContainer" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9" Feb 02 11:24:27 crc kubenswrapper[4838]: E0202 11:24:27.523776 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:24:41 crc kubenswrapper[4838]: I0202 11:24:41.507305 4838 scope.go:117] "RemoveContainer" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9" Feb 02 11:24:41 crc kubenswrapper[4838]: E0202 11:24:41.508824 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:24:44 crc kubenswrapper[4838]: I0202 11:24:44.037915 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-swztr"] Feb 02 11:24:44 crc kubenswrapper[4838]: I0202 11:24:44.050102 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-swztr"] Feb 02 11:24:44 crc kubenswrapper[4838]: I0202 11:24:44.518426 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c1f48f2-93aa-4b92-a289-7869c1993629" path="/var/lib/kubelet/pods/3c1f48f2-93aa-4b92-a289-7869c1993629/volumes" Feb 02 11:24:47 crc kubenswrapper[4838]: I0202 11:24:47.880103 4838 scope.go:117] "RemoveContainer" containerID="c467002944aa28ab1606b991f49e609351c7d6cd2c5e92143ff8d7880235d978" Feb 02 11:24:47 crc kubenswrapper[4838]: I0202 11:24:47.917005 4838 scope.go:117] "RemoveContainer" containerID="c855af0fa2c08c2144c31666a641e6cbb7ea66182beb1e800da4f1f4d8f51032" Feb 02 11:24:47 crc kubenswrapper[4838]: I0202 11:24:47.967603 4838 scope.go:117] "RemoveContainer" containerID="c51949688f02a5a86158690a24397981872e3a43b4c2e7c7456e37c01858a43a" Feb 02 11:24:55 crc kubenswrapper[4838]: I0202 11:24:55.506444 4838 scope.go:117] "RemoveContainer" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9" Feb 02 11:24:55 crc kubenswrapper[4838]: E0202 11:24:55.507028 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:25:07 crc kubenswrapper[4838]: I0202 11:25:07.506393 4838 scope.go:117] "RemoveContainer" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9" Feb 02 11:25:07 crc kubenswrapper[4838]: E0202 11:25:07.507203 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:25:19 crc kubenswrapper[4838]: I0202 11:25:19.506269 4838 scope.go:117] "RemoveContainer" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9" Feb 02 11:25:19 crc kubenswrapper[4838]: E0202 11:25:19.507531 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:25:30 crc kubenswrapper[4838]: I0202 11:25:30.506909 4838 
scope.go:117] "RemoveContainer" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9" Feb 02 11:25:30 crc kubenswrapper[4838]: E0202 11:25:30.507600 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:25:42 crc kubenswrapper[4838]: I0202 11:25:42.506017 4838 scope.go:117] "RemoveContainer" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9" Feb 02 11:25:42 crc kubenswrapper[4838]: E0202 11:25:42.506869 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:25:57 crc kubenswrapper[4838]: I0202 11:25:57.505654 4838 scope.go:117] "RemoveContainer" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9" Feb 02 11:25:58 crc kubenswrapper[4838]: I0202 11:25:58.620797 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" event={"ID":"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce","Type":"ContainerStarted","Data":"175ffe8c2785ccb5a4bb5ad4bb28adaa432562c8282f7529b47cf5fe3050f259"} Feb 02 11:25:59 crc kubenswrapper[4838]: I0202 11:25:59.046117 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-bx8gk"] Feb 02 11:25:59 crc kubenswrapper[4838]: I0202 11:25:59.055888 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-bx8gk"] Feb 02 11:26:00 crc kubenswrapper[4838]: I0202 11:26:00.516499 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c77290d-3574-4eb4-ab26-abf2f666367b" path="/var/lib/kubelet/pods/6c77290d-3574-4eb4-ab26-abf2f666367b/volumes" Feb 02 11:26:01 crc kubenswrapper[4838]: I0202 11:26:01.030520 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-2b0b-account-create-update-kk8h4"] Feb 02 11:26:01 crc kubenswrapper[4838]: I0202 11:26:01.062136 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-2b0b-account-create-update-kk8h4"] Feb 02 11:26:02 crc kubenswrapper[4838]: I0202 11:26:02.522456 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3" path="/var/lib/kubelet/pods/8b1e17d7-7746-46df-bcfd-2ca72c0cd2b3/volumes" Feb 02 11:26:07 crc kubenswrapper[4838]: I0202 11:26:07.050729 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-bh9vr"] Feb 02 11:26:07 crc kubenswrapper[4838]: I0202 11:26:07.064299 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-bh9vr"] Feb 02 11:26:08 crc kubenswrapper[4838]: I0202 11:26:08.033321 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-mpdzc"] Feb 02 11:26:08 crc kubenswrapper[4838]: I0202 11:26:08.041221 4838 kubelet.go:2431] "SyncLoop 
REMOVE" source="api" pods=["openstack/nova-cell0-db-create-mpdzc"] Feb 02 11:26:08 crc kubenswrapper[4838]: I0202 11:26:08.520735 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="052c9a16-08e7-4892-8bcd-dc3055e14ca8" path="/var/lib/kubelet/pods/052c9a16-08e7-4892-8bcd-dc3055e14ca8/volumes" Feb 02 11:26:08 crc kubenswrapper[4838]: I0202 11:26:08.521651 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d369bf9a-8e44-4186-8513-9f73bd321e6e" path="/var/lib/kubelet/pods/d369bf9a-8e44-4186-8513-9f73bd321e6e/volumes" Feb 02 11:26:13 crc kubenswrapper[4838]: I0202 11:26:13.032645 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-e31d-account-create-update-xckgb"] Feb 02 11:26:13 crc kubenswrapper[4838]: I0202 11:26:13.042298 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-5f6e-account-create-update-jp75l"] Feb 02 11:26:13 crc kubenswrapper[4838]: I0202 11:26:13.053387 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-5f6e-account-create-update-jp75l"] Feb 02 11:26:13 crc kubenswrapper[4838]: I0202 11:26:13.063059 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-e31d-account-create-update-xckgb"] Feb 02 11:26:14 crc kubenswrapper[4838]: I0202 11:26:14.519257 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55" path="/var/lib/kubelet/pods/04fce4ed-ea3e-4bf7-b4ab-88b877fcbf55/volumes" Feb 02 11:26:14 crc kubenswrapper[4838]: I0202 11:26:14.521221 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c77c3373-fdbf-4a38-8ab0-ce701577f29f" path="/var/lib/kubelet/pods/c77c3373-fdbf-4a38-8ab0-ce701577f29f/volumes" Feb 02 11:26:32 crc kubenswrapper[4838]: I0202 11:26:32.032437 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-4dkm9"] Feb 02 11:26:32 crc kubenswrapper[4838]: I0202 11:26:32.058408 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-4dkm9"] Feb 02 11:26:32 crc kubenswrapper[4838]: I0202 11:26:32.516739 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50879d46-58dc-4716-89fd-bc68eea3bd2e" path="/var/lib/kubelet/pods/50879d46-58dc-4716-89fd-bc68eea3bd2e/volumes" Feb 02 11:26:48 crc kubenswrapper[4838]: I0202 11:26:48.125757 4838 scope.go:117] "RemoveContainer" containerID="30ba2e023fa61f40d57ac45157d0816e07d0ffa2117bfb9862c75fcc972e1bcb" Feb 02 11:26:48 crc kubenswrapper[4838]: I0202 11:26:48.162931 4838 scope.go:117] "RemoveContainer" containerID="3cdd611b1be2d3ebe42022994f552f691fd0f980327a4ce4b87bcfe2894d8292" Feb 02 11:26:48 crc kubenswrapper[4838]: I0202 11:26:48.193443 4838 scope.go:117] "RemoveContainer" containerID="b4aabe4e7178839b44f6e257f70df7bbdc3d81f6766309501d4ed6d2e1482a0b" Feb 02 11:26:48 crc kubenswrapper[4838]: I0202 11:26:48.219141 4838 scope.go:117] "RemoveContainer" containerID="093cc634de90b5c2024fdd65ae71c20107d325c1d7fb2acfa1d56c855a7890cb" Feb 02 11:26:48 crc kubenswrapper[4838]: I0202 11:26:48.262363 4838 scope.go:117] "RemoveContainer" containerID="858527d4cfb040fb4f32e431c29bb41f26498554fd87d58c448f438b4a7a8c7f" Feb 02 11:26:48 crc kubenswrapper[4838]: I0202 11:26:48.318886 4838 scope.go:117] "RemoveContainer" containerID="d6a5b3e1e3078525d973a08ccf0d97e428181aa8b957ddb5039e2661ddf1a500" Feb 02 11:26:48 crc kubenswrapper[4838]: I0202 11:26:48.373332 4838 scope.go:117] "RemoveContainer" 
containerID="a4509ee30dd40d9774a51e328127f4308faaa33f39254883fe22b5aed9fbc5da" Feb 02 11:26:48 crc kubenswrapper[4838]: I0202 11:26:48.408645 4838 scope.go:117] "RemoveContainer" containerID="ac1a6fcf0eca00396a708ee597872b3453fc12310c0a4d0f7e014f7839628f48" Feb 02 11:26:48 crc kubenswrapper[4838]: I0202 11:26:48.432852 4838 scope.go:117] "RemoveContainer" containerID="d8d0f7451dcbb418f26043ca0932b0d2cea018fd39287787d4f3fdad971ceee4" Feb 02 11:26:48 crc kubenswrapper[4838]: I0202 11:26:48.455716 4838 scope.go:117] "RemoveContainer" containerID="0b5d55f1358badab9b4cea5a00f3aaa32f6b79b2e6923f7aa38805adfd89f216" Feb 02 11:26:49 crc kubenswrapper[4838]: I0202 11:26:49.713692 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4f74c"] Feb 02 11:26:49 crc kubenswrapper[4838]: I0202 11:26:49.721819 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4f74c" Feb 02 11:26:49 crc kubenswrapper[4838]: I0202 11:26:49.731911 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4f74c"] Feb 02 11:26:49 crc kubenswrapper[4838]: I0202 11:26:49.878272 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b24e956c-027a-4a1e-ab59-8d56c2c1bc42-catalog-content\") pod \"redhat-operators-4f74c\" (UID: \"b24e956c-027a-4a1e-ab59-8d56c2c1bc42\") " pod="openshift-marketplace/redhat-operators-4f74c" Feb 02 11:26:49 crc kubenswrapper[4838]: I0202 11:26:49.878425 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b24e956c-027a-4a1e-ab59-8d56c2c1bc42-utilities\") pod \"redhat-operators-4f74c\" (UID: \"b24e956c-027a-4a1e-ab59-8d56c2c1bc42\") " pod="openshift-marketplace/redhat-operators-4f74c" Feb 02 11:26:49 crc kubenswrapper[4838]: I0202 11:26:49.878454 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpb8k\" (UniqueName: \"kubernetes.io/projected/b24e956c-027a-4a1e-ab59-8d56c2c1bc42-kube-api-access-dpb8k\") pod \"redhat-operators-4f74c\" (UID: \"b24e956c-027a-4a1e-ab59-8d56c2c1bc42\") " pod="openshift-marketplace/redhat-operators-4f74c" Feb 02 11:26:49 crc kubenswrapper[4838]: I0202 11:26:49.980169 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b24e956c-027a-4a1e-ab59-8d56c2c1bc42-catalog-content\") pod \"redhat-operators-4f74c\" (UID: \"b24e956c-027a-4a1e-ab59-8d56c2c1bc42\") " pod="openshift-marketplace/redhat-operators-4f74c" Feb 02 11:26:49 crc kubenswrapper[4838]: I0202 11:26:49.980299 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b24e956c-027a-4a1e-ab59-8d56c2c1bc42-utilities\") pod \"redhat-operators-4f74c\" (UID: \"b24e956c-027a-4a1e-ab59-8d56c2c1bc42\") " pod="openshift-marketplace/redhat-operators-4f74c" Feb 02 11:26:49 crc kubenswrapper[4838]: I0202 11:26:49.980325 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpb8k\" (UniqueName: \"kubernetes.io/projected/b24e956c-027a-4a1e-ab59-8d56c2c1bc42-kube-api-access-dpb8k\") pod \"redhat-operators-4f74c\" (UID: \"b24e956c-027a-4a1e-ab59-8d56c2c1bc42\") " pod="openshift-marketplace/redhat-operators-4f74c" Feb 02 
11:26:49 crc kubenswrapper[4838]: I0202 11:26:49.980862 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b24e956c-027a-4a1e-ab59-8d56c2c1bc42-utilities\") pod \"redhat-operators-4f74c\" (UID: \"b24e956c-027a-4a1e-ab59-8d56c2c1bc42\") " pod="openshift-marketplace/redhat-operators-4f74c" Feb 02 11:26:49 crc kubenswrapper[4838]: I0202 11:26:49.980865 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b24e956c-027a-4a1e-ab59-8d56c2c1bc42-catalog-content\") pod \"redhat-operators-4f74c\" (UID: \"b24e956c-027a-4a1e-ab59-8d56c2c1bc42\") " pod="openshift-marketplace/redhat-operators-4f74c" Feb 02 11:26:50 crc kubenswrapper[4838]: I0202 11:26:50.006822 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpb8k\" (UniqueName: \"kubernetes.io/projected/b24e956c-027a-4a1e-ab59-8d56c2c1bc42-kube-api-access-dpb8k\") pod \"redhat-operators-4f74c\" (UID: \"b24e956c-027a-4a1e-ab59-8d56c2c1bc42\") " pod="openshift-marketplace/redhat-operators-4f74c" Feb 02 11:26:50 crc kubenswrapper[4838]: I0202 11:26:50.042060 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4f74c" Feb 02 11:26:50 crc kubenswrapper[4838]: I0202 11:26:50.578240 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4f74c"] Feb 02 11:26:50 crc kubenswrapper[4838]: W0202 11:26:50.582557 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb24e956c_027a_4a1e_ab59_8d56c2c1bc42.slice/crio-f08a1985da4da3c7b76e60d2550f6e08726e23f8bd162ed5fde42276112241ae WatchSource:0}: Error finding container f08a1985da4da3c7b76e60d2550f6e08726e23f8bd162ed5fde42276112241ae: Status 404 returned error can't find the container with id f08a1985da4da3c7b76e60d2550f6e08726e23f8bd162ed5fde42276112241ae Feb 02 11:26:51 crc kubenswrapper[4838]: I0202 11:26:51.090255 4838 generic.go:334] "Generic (PLEG): container finished" podID="b24e956c-027a-4a1e-ab59-8d56c2c1bc42" containerID="2620c176c33e45422347881b8707bdb131bfba5827bd58bb447e4296f17dd258" exitCode=0 Feb 02 11:26:51 crc kubenswrapper[4838]: I0202 11:26:51.090505 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4f74c" event={"ID":"b24e956c-027a-4a1e-ab59-8d56c2c1bc42","Type":"ContainerDied","Data":"2620c176c33e45422347881b8707bdb131bfba5827bd58bb447e4296f17dd258"} Feb 02 11:26:51 crc kubenswrapper[4838]: I0202 11:26:51.090530 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4f74c" event={"ID":"b24e956c-027a-4a1e-ab59-8d56c2c1bc42","Type":"ContainerStarted","Data":"f08a1985da4da3c7b76e60d2550f6e08726e23f8bd162ed5fde42276112241ae"} Feb 02 11:26:51 crc kubenswrapper[4838]: I0202 11:26:51.092522 4838 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 11:26:52 crc kubenswrapper[4838]: I0202 11:26:52.100891 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4f74c" event={"ID":"b24e956c-027a-4a1e-ab59-8d56c2c1bc42","Type":"ContainerStarted","Data":"78308e155eb21c591e239f3e9732b89bbde1c4d0dd446eadec9de5001a16c8d2"} Feb 02 11:26:54 crc kubenswrapper[4838]: I0202 11:26:54.121892 4838 generic.go:334] "Generic (PLEG): container finished" 
podID="b24e956c-027a-4a1e-ab59-8d56c2c1bc42" containerID="78308e155eb21c591e239f3e9732b89bbde1c4d0dd446eadec9de5001a16c8d2" exitCode=0 Feb 02 11:26:54 crc kubenswrapper[4838]: I0202 11:26:54.121983 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4f74c" event={"ID":"b24e956c-027a-4a1e-ab59-8d56c2c1bc42","Type":"ContainerDied","Data":"78308e155eb21c591e239f3e9732b89bbde1c4d0dd446eadec9de5001a16c8d2"} Feb 02 11:26:55 crc kubenswrapper[4838]: I0202 11:26:55.133687 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4f74c" event={"ID":"b24e956c-027a-4a1e-ab59-8d56c2c1bc42","Type":"ContainerStarted","Data":"adfe9f4c7f1fc0a23561774d6ef32c8e6868ebf4c2672036fa69387ebfd87016"} Feb 02 11:26:55 crc kubenswrapper[4838]: I0202 11:26:55.153141 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4f74c" podStartSLOduration=2.7108185860000003 podStartE2EDuration="6.153118795s" podCreationTimestamp="2026-02-02 11:26:49 +0000 UTC" firstStartedPulling="2026-02-02 11:26:51.092330919 +0000 UTC m=+2005.429431947" lastFinishedPulling="2026-02-02 11:26:54.534631128 +0000 UTC m=+2008.871732156" observedRunningTime="2026-02-02 11:26:55.150333109 +0000 UTC m=+2009.487434137" watchObservedRunningTime="2026-02-02 11:26:55.153118795 +0000 UTC m=+2009.490219833" Feb 02 11:27:00 crc kubenswrapper[4838]: I0202 11:27:00.042708 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4f74c" Feb 02 11:27:00 crc kubenswrapper[4838]: I0202 11:27:00.043283 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4f74c" Feb 02 11:27:01 crc kubenswrapper[4838]: I0202 11:27:01.102571 4838 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-4f74c" podUID="b24e956c-027a-4a1e-ab59-8d56c2c1bc42" containerName="registry-server" probeResult="failure" output=< Feb 02 11:27:01 crc kubenswrapper[4838]: timeout: failed to connect service ":50051" within 1s Feb 02 11:27:01 crc kubenswrapper[4838]: > Feb 02 11:27:10 crc kubenswrapper[4838]: I0202 11:27:10.095027 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4f74c" Feb 02 11:27:10 crc kubenswrapper[4838]: I0202 11:27:10.151704 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4f74c" Feb 02 11:27:10 crc kubenswrapper[4838]: I0202 11:27:10.336898 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4f74c"] Feb 02 11:27:11 crc kubenswrapper[4838]: I0202 11:27:11.259140 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4f74c" podUID="b24e956c-027a-4a1e-ab59-8d56c2c1bc42" containerName="registry-server" containerID="cri-o://adfe9f4c7f1fc0a23561774d6ef32c8e6868ebf4c2672036fa69387ebfd87016" gracePeriod=2 Feb 02 11:27:11 crc kubenswrapper[4838]: I0202 11:27:11.753076 4838 util.go:48] "No ready sandbox for pod can be found. 
Feb 02 11:27:11 crc kubenswrapper[4838]: I0202 11:27:11.753076 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4f74c"
Feb 02 11:27:11 crc kubenswrapper[4838]: I0202 11:27:11.933288 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b24e956c-027a-4a1e-ab59-8d56c2c1bc42-catalog-content\") pod \"b24e956c-027a-4a1e-ab59-8d56c2c1bc42\" (UID: \"b24e956c-027a-4a1e-ab59-8d56c2c1bc42\") "
Feb 02 11:27:11 crc kubenswrapper[4838]: I0202 11:27:11.933428 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dpb8k\" (UniqueName: \"kubernetes.io/projected/b24e956c-027a-4a1e-ab59-8d56c2c1bc42-kube-api-access-dpb8k\") pod \"b24e956c-027a-4a1e-ab59-8d56c2c1bc42\" (UID: \"b24e956c-027a-4a1e-ab59-8d56c2c1bc42\") "
Feb 02 11:27:11 crc kubenswrapper[4838]: I0202 11:27:11.933520 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b24e956c-027a-4a1e-ab59-8d56c2c1bc42-utilities\") pod \"b24e956c-027a-4a1e-ab59-8d56c2c1bc42\" (UID: \"b24e956c-027a-4a1e-ab59-8d56c2c1bc42\") "
Feb 02 11:27:11 crc kubenswrapper[4838]: I0202 11:27:11.934286 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b24e956c-027a-4a1e-ab59-8d56c2c1bc42-utilities" (OuterVolumeSpecName: "utilities") pod "b24e956c-027a-4a1e-ab59-8d56c2c1bc42" (UID: "b24e956c-027a-4a1e-ab59-8d56c2c1bc42"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:27:11 crc kubenswrapper[4838]: I0202 11:27:11.938969 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b24e956c-027a-4a1e-ab59-8d56c2c1bc42-kube-api-access-dpb8k" (OuterVolumeSpecName: "kube-api-access-dpb8k") pod "b24e956c-027a-4a1e-ab59-8d56c2c1bc42" (UID: "b24e956c-027a-4a1e-ab59-8d56c2c1bc42"). InnerVolumeSpecName "kube-api-access-dpb8k". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:27:12 crc kubenswrapper[4838]: I0202 11:27:12.035359 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b24e956c-027a-4a1e-ab59-8d56c2c1bc42-utilities\") on node \"crc\" DevicePath \"\""
Feb 02 11:27:12 crc kubenswrapper[4838]: I0202 11:27:12.035395 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dpb8k\" (UniqueName: \"kubernetes.io/projected/b24e956c-027a-4a1e-ab59-8d56c2c1bc42-kube-api-access-dpb8k\") on node \"crc\" DevicePath \"\""
Feb 02 11:27:12 crc kubenswrapper[4838]: I0202 11:27:12.075848 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b24e956c-027a-4a1e-ab59-8d56c2c1bc42-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b24e956c-027a-4a1e-ab59-8d56c2c1bc42" (UID: "b24e956c-027a-4a1e-ab59-8d56c2c1bc42"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:27:12 crc kubenswrapper[4838]: I0202 11:27:12.138465 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b24e956c-027a-4a1e-ab59-8d56c2c1bc42-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 02 11:27:12 crc kubenswrapper[4838]: I0202 11:27:12.271960 4838 generic.go:334] "Generic (PLEG): container finished" podID="b24e956c-027a-4a1e-ab59-8d56c2c1bc42" containerID="adfe9f4c7f1fc0a23561774d6ef32c8e6868ebf4c2672036fa69387ebfd87016" exitCode=0
Feb 02 11:27:12 crc kubenswrapper[4838]: I0202 11:27:12.272011 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4f74c" event={"ID":"b24e956c-027a-4a1e-ab59-8d56c2c1bc42","Type":"ContainerDied","Data":"adfe9f4c7f1fc0a23561774d6ef32c8e6868ebf4c2672036fa69387ebfd87016"}
Feb 02 11:27:12 crc kubenswrapper[4838]: I0202 11:27:12.272020 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4f74c"
Feb 02 11:27:12 crc kubenswrapper[4838]: I0202 11:27:12.272040 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4f74c" event={"ID":"b24e956c-027a-4a1e-ab59-8d56c2c1bc42","Type":"ContainerDied","Data":"f08a1985da4da3c7b76e60d2550f6e08726e23f8bd162ed5fde42276112241ae"}
Feb 02 11:27:12 crc kubenswrapper[4838]: I0202 11:27:12.272058 4838 scope.go:117] "RemoveContainer" containerID="adfe9f4c7f1fc0a23561774d6ef32c8e6868ebf4c2672036fa69387ebfd87016"
Feb 02 11:27:12 crc kubenswrapper[4838]: I0202 11:27:12.306816 4838 scope.go:117] "RemoveContainer" containerID="78308e155eb21c591e239f3e9732b89bbde1c4d0dd446eadec9de5001a16c8d2"
Feb 02 11:27:12 crc kubenswrapper[4838]: I0202 11:27:12.312025 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4f74c"]
Feb 02 11:27:12 crc kubenswrapper[4838]: I0202 11:27:12.321547 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4f74c"]
Feb 02 11:27:12 crc kubenswrapper[4838]: I0202 11:27:12.347153 4838 scope.go:117] "RemoveContainer" containerID="2620c176c33e45422347881b8707bdb131bfba5827bd58bb447e4296f17dd258"
Feb 02 11:27:12 crc kubenswrapper[4838]: I0202 11:27:12.395143 4838 scope.go:117] "RemoveContainer" containerID="adfe9f4c7f1fc0a23561774d6ef32c8e6868ebf4c2672036fa69387ebfd87016"
Feb 02 11:27:12 crc kubenswrapper[4838]: E0202 11:27:12.395704 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"adfe9f4c7f1fc0a23561774d6ef32c8e6868ebf4c2672036fa69387ebfd87016\": container with ID starting with adfe9f4c7f1fc0a23561774d6ef32c8e6868ebf4c2672036fa69387ebfd87016 not found: ID does not exist" containerID="adfe9f4c7f1fc0a23561774d6ef32c8e6868ebf4c2672036fa69387ebfd87016"
Feb 02 11:27:12 crc kubenswrapper[4838]: I0202 11:27:12.395848 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"adfe9f4c7f1fc0a23561774d6ef32c8e6868ebf4c2672036fa69387ebfd87016"} err="failed to get container status \"adfe9f4c7f1fc0a23561774d6ef32c8e6868ebf4c2672036fa69387ebfd87016\": rpc error: code = NotFound desc = could not find container \"adfe9f4c7f1fc0a23561774d6ef32c8e6868ebf4c2672036fa69387ebfd87016\": container with ID starting with adfe9f4c7f1fc0a23561774d6ef32c8e6868ebf4c2672036fa69387ebfd87016 not found: ID does not exist"
Feb 02 11:27:12 crc kubenswrapper[4838]: I0202 11:27:12.395949 4838 scope.go:117] "RemoveContainer" containerID="78308e155eb21c591e239f3e9732b89bbde1c4d0dd446eadec9de5001a16c8d2"
Feb 02 11:27:12 crc kubenswrapper[4838]: E0202 11:27:12.396437 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78308e155eb21c591e239f3e9732b89bbde1c4d0dd446eadec9de5001a16c8d2\": container with ID starting with 78308e155eb21c591e239f3e9732b89bbde1c4d0dd446eadec9de5001a16c8d2 not found: ID does not exist" containerID="78308e155eb21c591e239f3e9732b89bbde1c4d0dd446eadec9de5001a16c8d2"
Feb 02 11:27:12 crc kubenswrapper[4838]: I0202 11:27:12.396478 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78308e155eb21c591e239f3e9732b89bbde1c4d0dd446eadec9de5001a16c8d2"} err="failed to get container status \"78308e155eb21c591e239f3e9732b89bbde1c4d0dd446eadec9de5001a16c8d2\": rpc error: code = NotFound desc = could not find container \"78308e155eb21c591e239f3e9732b89bbde1c4d0dd446eadec9de5001a16c8d2\": container with ID starting with 78308e155eb21c591e239f3e9732b89bbde1c4d0dd446eadec9de5001a16c8d2 not found: ID does not exist"
Feb 02 11:27:12 crc kubenswrapper[4838]: I0202 11:27:12.396505 4838 scope.go:117] "RemoveContainer" containerID="2620c176c33e45422347881b8707bdb131bfba5827bd58bb447e4296f17dd258"
Feb 02 11:27:12 crc kubenswrapper[4838]: E0202 11:27:12.396836 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2620c176c33e45422347881b8707bdb131bfba5827bd58bb447e4296f17dd258\": container with ID starting with 2620c176c33e45422347881b8707bdb131bfba5827bd58bb447e4296f17dd258 not found: ID does not exist" containerID="2620c176c33e45422347881b8707bdb131bfba5827bd58bb447e4296f17dd258"
Feb 02 11:27:12 crc kubenswrapper[4838]: I0202 11:27:12.396875 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2620c176c33e45422347881b8707bdb131bfba5827bd58bb447e4296f17dd258"} err="failed to get container status \"2620c176c33e45422347881b8707bdb131bfba5827bd58bb447e4296f17dd258\": rpc error: code = NotFound desc = could not find container \"2620c176c33e45422347881b8707bdb131bfba5827bd58bb447e4296f17dd258\": container with ID starting with 2620c176c33e45422347881b8707bdb131bfba5827bd58bb447e4296f17dd258 not found: ID does not exist"
Feb 02 11:27:12 crc kubenswrapper[4838]: I0202 11:27:12.520111 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b24e956c-027a-4a1e-ab59-8d56c2c1bc42" path="/var/lib/kubelet/pods/b24e956c-027a-4a1e-ab59-8d56c2c1bc42/volumes"
Feb 02 11:27:25 crc kubenswrapper[4838]: I0202 11:27:25.063583 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-5jll9"]
Feb 02 11:27:25 crc kubenswrapper[4838]: I0202 11:27:25.075089 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-5jll9"]
Feb 02 11:27:26 crc kubenswrapper[4838]: I0202 11:27:26.525460 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12da676a-3c0b-4e05-996b-6f0b859d99e3" path="/var/lib/kubelet/pods/12da676a-3c0b-4e05-996b-6f0b859d99e3/volumes"
Feb 02 11:27:48 crc kubenswrapper[4838]: I0202 11:27:48.655362 4838 scope.go:117] "RemoveContainer" containerID="05667767dc9c615f688152f9f30c5e1ea9de23e5a3834c9f6d404517e4864ba3"
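The E/I pairs above show the kubelet asking the runtime for the status of containers it has already removed, and tolerating the NotFound gRPC code. A sketch of that classification with google.golang.org/grpc/status; the classify helper is illustrative, not kubelet code:

// notfound.go - distinguishes a benign "container already gone" CRI answer
// from a real failure, matching the rpc error: code = NotFound lines above.
package main

import (
	"errors"
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

func classify(err error) string {
	if err == nil {
		return "ok"
	}
	// status.FromError decodes the gRPC status carried by errors returned
	// from RPCs such as ContainerStatus.
	if s, ok := status.FromError(err); ok && s.Code() == codes.NotFound {
		return "already removed (safe to ignore during cleanup)"
	}
	return "real runtime failure"
}

func main() {
	gone := status.Error(codes.NotFound,
		`could not find container "adfe9f4c...": ID does not exist`)
	fmt.Println(classify(gone))
	fmt.Println(classify(errors.New("connection reset")))
}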
source="api" pods=["openstack/ironic-inspector-db-create-rrckz"] Feb 02 11:28:05 crc kubenswrapper[4838]: I0202 11:28:05.048116 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ironic-inspector-5de1-account-create-update-blrln"] Feb 02 11:28:05 crc kubenswrapper[4838]: I0202 11:28:05.058919 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ironic-inspector-db-create-rrckz"] Feb 02 11:28:05 crc kubenswrapper[4838]: I0202 11:28:05.071237 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ironic-inspector-5de1-account-create-update-blrln"] Feb 02 11:28:06 crc kubenswrapper[4838]: I0202 11:28:06.524841 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9dec3cd0-38f7-4d41-92ad-2e0c8d36e136" path="/var/lib/kubelet/pods/9dec3cd0-38f7-4d41-92ad-2e0c8d36e136/volumes" Feb 02 11:28:06 crc kubenswrapper[4838]: I0202 11:28:06.525910 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f" path="/var/lib/kubelet/pods/c6cfc3ca-16a5-4d4c-90ac-511fec2bdc3f/volumes" Feb 02 11:28:06 crc kubenswrapper[4838]: I0202 11:28:06.590910 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wjz54/must-gather-29j7m"] Feb 02 11:28:06 crc kubenswrapper[4838]: E0202 11:28:06.591382 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b24e956c-027a-4a1e-ab59-8d56c2c1bc42" containerName="registry-server" Feb 02 11:28:06 crc kubenswrapper[4838]: I0202 11:28:06.591401 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="b24e956c-027a-4a1e-ab59-8d56c2c1bc42" containerName="registry-server" Feb 02 11:28:06 crc kubenswrapper[4838]: E0202 11:28:06.591413 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b24e956c-027a-4a1e-ab59-8d56c2c1bc42" containerName="extract-utilities" Feb 02 11:28:06 crc kubenswrapper[4838]: I0202 11:28:06.591422 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="b24e956c-027a-4a1e-ab59-8d56c2c1bc42" containerName="extract-utilities" Feb 02 11:28:06 crc kubenswrapper[4838]: E0202 11:28:06.591453 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b24e956c-027a-4a1e-ab59-8d56c2c1bc42" containerName="extract-content" Feb 02 11:28:06 crc kubenswrapper[4838]: I0202 11:28:06.591460 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="b24e956c-027a-4a1e-ab59-8d56c2c1bc42" containerName="extract-content" Feb 02 11:28:06 crc kubenswrapper[4838]: I0202 11:28:06.591686 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="b24e956c-027a-4a1e-ab59-8d56c2c1bc42" containerName="registry-server" Feb 02 11:28:06 crc kubenswrapper[4838]: I0202 11:28:06.592873 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wjz54/must-gather-29j7m" Feb 02 11:28:06 crc kubenswrapper[4838]: I0202 11:28:06.598231 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-wjz54"/"default-dockercfg-jx7mm" Feb 02 11:28:06 crc kubenswrapper[4838]: I0202 11:28:06.599708 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-wjz54"/"openshift-service-ca.crt" Feb 02 11:28:06 crc kubenswrapper[4838]: I0202 11:28:06.610653 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-wjz54"/"kube-root-ca.crt" Feb 02 11:28:06 crc kubenswrapper[4838]: I0202 11:28:06.614696 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-wjz54/must-gather-29j7m"] Feb 02 11:28:06 crc kubenswrapper[4838]: I0202 11:28:06.757720 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mlmt\" (UniqueName: \"kubernetes.io/projected/abf3d322-df58-43d2-bd16-302cd0c158bc-kube-api-access-2mlmt\") pod \"must-gather-29j7m\" (UID: \"abf3d322-df58-43d2-bd16-302cd0c158bc\") " pod="openshift-must-gather-wjz54/must-gather-29j7m" Feb 02 11:28:06 crc kubenswrapper[4838]: I0202 11:28:06.757831 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/abf3d322-df58-43d2-bd16-302cd0c158bc-must-gather-output\") pod \"must-gather-29j7m\" (UID: \"abf3d322-df58-43d2-bd16-302cd0c158bc\") " pod="openshift-must-gather-wjz54/must-gather-29j7m" Feb 02 11:28:06 crc kubenswrapper[4838]: I0202 11:28:06.859906 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mlmt\" (UniqueName: \"kubernetes.io/projected/abf3d322-df58-43d2-bd16-302cd0c158bc-kube-api-access-2mlmt\") pod \"must-gather-29j7m\" (UID: \"abf3d322-df58-43d2-bd16-302cd0c158bc\") " pod="openshift-must-gather-wjz54/must-gather-29j7m" Feb 02 11:28:06 crc kubenswrapper[4838]: I0202 11:28:06.859943 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/abf3d322-df58-43d2-bd16-302cd0c158bc-must-gather-output\") pod \"must-gather-29j7m\" (UID: \"abf3d322-df58-43d2-bd16-302cd0c158bc\") " pod="openshift-must-gather-wjz54/must-gather-29j7m" Feb 02 11:28:06 crc kubenswrapper[4838]: I0202 11:28:06.860319 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/abf3d322-df58-43d2-bd16-302cd0c158bc-must-gather-output\") pod \"must-gather-29j7m\" (UID: \"abf3d322-df58-43d2-bd16-302cd0c158bc\") " pod="openshift-must-gather-wjz54/must-gather-29j7m" Feb 02 11:28:06 crc kubenswrapper[4838]: I0202 11:28:06.878646 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mlmt\" (UniqueName: \"kubernetes.io/projected/abf3d322-df58-43d2-bd16-302cd0c158bc-kube-api-access-2mlmt\") pod \"must-gather-29j7m\" (UID: \"abf3d322-df58-43d2-bd16-302cd0c158bc\") " pod="openshift-must-gather-wjz54/must-gather-29j7m" Feb 02 11:28:06 crc kubenswrapper[4838]: I0202 11:28:06.915709 4838 util.go:30] "No sandbox for pod can be found. 
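Each entry above is a journald prefix followed by a klog header (severity, timestamp, thread id, file:line) and a structured message. A sketch of splitting such lines apart; the pattern is fitted to the lines in this log, not an official grammar:

// klogparse.go - splits the journal+klog lines above into journald prefix,
// klog severity/timestamp, source location, and the structured message.
package main

import (
	"fmt"
	"regexp"
)

var line = regexp.MustCompile(
	`^(\w{3} \d{2} \d{2}:\d{2}:\d{2}) (\S+) kubenswrapper\[(\d+)\]: ` + // journald prefix
		`([IWEF])(\d{4} \d{2}:\d{2}:\d{2}\.\d{6})\s+\d+ ` + // klog severity + time + thread id
		`([\w.]+:\d+)\] (.*)$`) // file:line] message

func main() {
	s := `Feb 02 11:28:06 crc kubenswrapper[4838]: I0202 11:28:06.590910 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wjz54/must-gather-29j7m"]`
	m := line.FindStringSubmatch(s)
	if m == nil {
		fmt.Println("no match")
		return
	}
	fmt.Println("host:", m[2], "severity:", m[4], "source:", m[6])
	fmt.Println("message:", m[7])
}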
Feb 02 11:28:06 crc kubenswrapper[4838]: I0202 11:28:06.915709 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wjz54/must-gather-29j7m"
Feb 02 11:28:07 crc kubenswrapper[4838]: I0202 11:28:07.425273 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-wjz54/must-gather-29j7m"]
Feb 02 11:28:07 crc kubenswrapper[4838]: I0202 11:28:07.773269 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wjz54/must-gather-29j7m" event={"ID":"abf3d322-df58-43d2-bd16-302cd0c158bc","Type":"ContainerStarted","Data":"4fcb19906ffe674bab582213793f123169235b52321489dfb39204596b299ee8"}
Feb 02 11:28:11 crc kubenswrapper[4838]: I0202 11:28:11.824284 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wjz54/must-gather-29j7m" event={"ID":"abf3d322-df58-43d2-bd16-302cd0c158bc","Type":"ContainerStarted","Data":"1fb78069fad3acd5b8e9446eb004f3c493dbe6fe08ab7e9bee93b182ebdf5dbf"}
Feb 02 11:28:11 crc kubenswrapper[4838]: I0202 11:28:11.825495 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wjz54/must-gather-29j7m" event={"ID":"abf3d322-df58-43d2-bd16-302cd0c158bc","Type":"ContainerStarted","Data":"1a6797b317718ed507b3bf849cb2fc36665096839e4c76adee307ce96fe89986"}
Feb 02 11:28:11 crc kubenswrapper[4838]: I0202 11:28:11.840851 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-wjz54/must-gather-29j7m" podStartSLOduration=2.003735561 podStartE2EDuration="5.840812626s" podCreationTimestamp="2026-02-02 11:28:06 +0000 UTC" firstStartedPulling="2026-02-02 11:28:07.436983268 +0000 UTC m=+2081.774084296" lastFinishedPulling="2026-02-02 11:28:11.274060323 +0000 UTC m=+2085.611161361" observedRunningTime="2026-02-02 11:28:11.835987255 +0000 UTC m=+2086.173088293" watchObservedRunningTime="2026-02-02 11:28:11.840812626 +0000 UTC m=+2086.177913664"
Feb 02 11:28:15 crc kubenswrapper[4838]: I0202 11:28:15.109954 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wjz54/crc-debug-vqhxh"]
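The podStartSLOduration above is the end-to-end startup time minus the image-pull window, computed on the monotonic m=+ offsets; the arithmetic checks out against the entry above:

// sloduration.go - a worked check of the podStartSLOduration arithmetic:
// SLO duration = podStartE2EDuration - (lastFinishedPulling - firstStartedPulling),
// using the monotonic (m=+...) offsets from the log entry.
package main

import "fmt"

func main() {
	const (
		firstStartedPulling = 2081.774084296 // m=+ offset, seconds
		lastFinishedPulling = 2085.611161361
		e2eDuration         = 5.840812626 // podStartE2EDuration
	)
	pulling := lastFinishedPulling - firstStartedPulling
	slo := e2eDuration - pulling
	fmt.Printf("pulling=%.9fs slo=%.9fs\n", pulling, slo)
	// prints pulling=3.837077065s slo=2.003735561s (up to float rounding),
	// matching podStartSLOduration=2.003735561 in the log.
}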
Feb 02 11:28:15 crc kubenswrapper[4838]: I0202 11:28:15.111691 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wjz54/crc-debug-vqhxh"
Feb 02 11:28:15 crc kubenswrapper[4838]: I0202 11:28:15.228132 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f2aa51fa-d715-477a-9caa-f2c6757666ed-host\") pod \"crc-debug-vqhxh\" (UID: \"f2aa51fa-d715-477a-9caa-f2c6757666ed\") " pod="openshift-must-gather-wjz54/crc-debug-vqhxh"
Feb 02 11:28:15 crc kubenswrapper[4838]: I0202 11:28:15.228216 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ch7d4\" (UniqueName: \"kubernetes.io/projected/f2aa51fa-d715-477a-9caa-f2c6757666ed-kube-api-access-ch7d4\") pod \"crc-debug-vqhxh\" (UID: \"f2aa51fa-d715-477a-9caa-f2c6757666ed\") " pod="openshift-must-gather-wjz54/crc-debug-vqhxh"
Feb 02 11:28:15 crc kubenswrapper[4838]: I0202 11:28:15.330270 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f2aa51fa-d715-477a-9caa-f2c6757666ed-host\") pod \"crc-debug-vqhxh\" (UID: \"f2aa51fa-d715-477a-9caa-f2c6757666ed\") " pod="openshift-must-gather-wjz54/crc-debug-vqhxh"
Feb 02 11:28:15 crc kubenswrapper[4838]: I0202 11:28:15.330340 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ch7d4\" (UniqueName: \"kubernetes.io/projected/f2aa51fa-d715-477a-9caa-f2c6757666ed-kube-api-access-ch7d4\") pod \"crc-debug-vqhxh\" (UID: \"f2aa51fa-d715-477a-9caa-f2c6757666ed\") " pod="openshift-must-gather-wjz54/crc-debug-vqhxh"
Feb 02 11:28:15 crc kubenswrapper[4838]: I0202 11:28:15.330448 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f2aa51fa-d715-477a-9caa-f2c6757666ed-host\") pod \"crc-debug-vqhxh\" (UID: \"f2aa51fa-d715-477a-9caa-f2c6757666ed\") " pod="openshift-must-gather-wjz54/crc-debug-vqhxh"
Feb 02 11:28:15 crc kubenswrapper[4838]: I0202 11:28:15.352742 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ch7d4\" (UniqueName: \"kubernetes.io/projected/f2aa51fa-d715-477a-9caa-f2c6757666ed-kube-api-access-ch7d4\") pod \"crc-debug-vqhxh\" (UID: \"f2aa51fa-d715-477a-9caa-f2c6757666ed\") " pod="openshift-must-gather-wjz54/crc-debug-vqhxh"
Feb 02 11:28:15 crc kubenswrapper[4838]: I0202 11:28:15.430097 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 02 11:28:15 crc kubenswrapper[4838]: I0202 11:28:15.430161 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 11:28:15 crc kubenswrapper[4838]: I0202 11:28:15.444858 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wjz54/crc-debug-vqhxh"
Feb 02 11:28:15 crc kubenswrapper[4838]: W0202 11:28:15.472556 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf2aa51fa_d715_477a_9caa_f2c6757666ed.slice/crio-6f2b545cde72ec06e4a8ce3829e439ee38b8c95fe9d585308fdb51cebb5528fc WatchSource:0}: Error finding container 6f2b545cde72ec06e4a8ce3829e439ee38b8c95fe9d585308fdb51cebb5528fc: Status 404 returned error can't find the container with id 6f2b545cde72ec06e4a8ce3829e439ee38b8c95fe9d585308fdb51cebb5528fc
Feb 02 11:28:15 crc kubenswrapper[4838]: I0202 11:28:15.858061 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wjz54/crc-debug-vqhxh" event={"ID":"f2aa51fa-d715-477a-9caa-f2c6757666ed","Type":"ContainerStarted","Data":"6f2b545cde72ec06e4a8ce3829e439ee38b8c95fe9d585308fdb51cebb5528fc"}
Feb 02 11:28:29 crc kubenswrapper[4838]: I0202 11:28:29.002002 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wjz54/crc-debug-vqhxh" event={"ID":"f2aa51fa-d715-477a-9caa-f2c6757666ed","Type":"ContainerStarted","Data":"ff42c1290b1ede43136cc4c470a465a0ba7fd915cca466e05db3fb5f398e2aa2"}
Feb 02 11:28:29 crc kubenswrapper[4838]: I0202 11:28:29.030288 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-wjz54/crc-debug-vqhxh" podStartSLOduration=0.860464129 podStartE2EDuration="14.030271434s" podCreationTimestamp="2026-02-02 11:28:15 +0000 UTC" firstStartedPulling="2026-02-02 11:28:15.474660219 +0000 UTC m=+2089.811761247" lastFinishedPulling="2026-02-02 11:28:28.644467524 +0000 UTC m=+2102.981568552" observedRunningTime="2026-02-02 11:28:29.01868259 +0000 UTC m=+2103.355783628" watchObservedRunningTime="2026-02-02 11:28:29.030271434 +0000 UTC m=+2103.367372462"
Feb 02 11:28:45 crc kubenswrapper[4838]: I0202 11:28:45.430066 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 02 11:28:45 crc kubenswrapper[4838]: I0202 11:28:45.430609 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 11:28:48 crc kubenswrapper[4838]: I0202 11:28:48.769480 4838 scope.go:117] "RemoveContainer" containerID="da64accedd8b51facf235c937846e436195424ab0dceb46c6920cb511f620b45"
Feb 02 11:28:49 crc kubenswrapper[4838]: I0202 11:28:49.201548 4838 scope.go:117] "RemoveContainer" containerID="28858d042b50683d5322341f67456255cb695f314cfbe492c35999dd9c4bf282"
Feb 02 11:28:51 crc kubenswrapper[4838]: I0202 11:28:51.036173 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-4r9v4"]
Feb 02 11:28:51 crc kubenswrapper[4838]: I0202 11:28:51.043594 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-4r9v4"]
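The liveness failures above mean nothing was listening on 127.0.0.1:8798 while the machine-config-daemon restarted. A minimal sketch of the kind of endpoint the probe expects; only the address and path come from the log, the handler body is assumed:

// health.go - serves the localhost health endpoint the liveness probe above
// GETs. While no process listens here, probes fail with "connect: connection refused".
package main

import (
	"log"
	"net/http"
)

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/health", func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK) // any success status counts as probe success
	})
	log.Fatal(http.ListenAndServe("127.0.0.1:8798", mux))
}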
path="/var/lib/kubelet/pods/948b1ebc-d2a2-4e7b-aa2e-d215bbae015d/volumes" Feb 02 11:29:12 crc kubenswrapper[4838]: I0202 11:29:12.362535 4838 generic.go:334] "Generic (PLEG): container finished" podID="f2aa51fa-d715-477a-9caa-f2c6757666ed" containerID="ff42c1290b1ede43136cc4c470a465a0ba7fd915cca466e05db3fb5f398e2aa2" exitCode=0 Feb 02 11:29:12 crc kubenswrapper[4838]: I0202 11:29:12.362674 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wjz54/crc-debug-vqhxh" event={"ID":"f2aa51fa-d715-477a-9caa-f2c6757666ed","Type":"ContainerDied","Data":"ff42c1290b1ede43136cc4c470a465a0ba7fd915cca466e05db3fb5f398e2aa2"} Feb 02 11:29:13 crc kubenswrapper[4838]: I0202 11:29:13.473708 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wjz54/crc-debug-vqhxh" Feb 02 11:29:13 crc kubenswrapper[4838]: I0202 11:29:13.504462 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wjz54/crc-debug-vqhxh"] Feb 02 11:29:13 crc kubenswrapper[4838]: I0202 11:29:13.513202 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wjz54/crc-debug-vqhxh"] Feb 02 11:29:13 crc kubenswrapper[4838]: I0202 11:29:13.559424 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f2aa51fa-d715-477a-9caa-f2c6757666ed-host\") pod \"f2aa51fa-d715-477a-9caa-f2c6757666ed\" (UID: \"f2aa51fa-d715-477a-9caa-f2c6757666ed\") " Feb 02 11:29:13 crc kubenswrapper[4838]: I0202 11:29:13.559539 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ch7d4\" (UniqueName: \"kubernetes.io/projected/f2aa51fa-d715-477a-9caa-f2c6757666ed-kube-api-access-ch7d4\") pod \"f2aa51fa-d715-477a-9caa-f2c6757666ed\" (UID: \"f2aa51fa-d715-477a-9caa-f2c6757666ed\") " Feb 02 11:29:13 crc kubenswrapper[4838]: I0202 11:29:13.559764 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f2aa51fa-d715-477a-9caa-f2c6757666ed-host" (OuterVolumeSpecName: "host") pod "f2aa51fa-d715-477a-9caa-f2c6757666ed" (UID: "f2aa51fa-d715-477a-9caa-f2c6757666ed"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 11:29:13 crc kubenswrapper[4838]: I0202 11:29:13.561688 4838 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f2aa51fa-d715-477a-9caa-f2c6757666ed-host\") on node \"crc\" DevicePath \"\"" Feb 02 11:29:13 crc kubenswrapper[4838]: I0202 11:29:13.568478 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2aa51fa-d715-477a-9caa-f2c6757666ed-kube-api-access-ch7d4" (OuterVolumeSpecName: "kube-api-access-ch7d4") pod "f2aa51fa-d715-477a-9caa-f2c6757666ed" (UID: "f2aa51fa-d715-477a-9caa-f2c6757666ed"). InnerVolumeSpecName "kube-api-access-ch7d4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:29:13 crc kubenswrapper[4838]: I0202 11:29:13.663834 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ch7d4\" (UniqueName: \"kubernetes.io/projected/f2aa51fa-d715-477a-9caa-f2c6757666ed-kube-api-access-ch7d4\") on node \"crc\" DevicePath \"\"" Feb 02 11:29:14 crc kubenswrapper[4838]: I0202 11:29:14.378420 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f2b545cde72ec06e4a8ce3829e439ee38b8c95fe9d585308fdb51cebb5528fc" Feb 02 11:29:14 crc kubenswrapper[4838]: I0202 11:29:14.378464 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wjz54/crc-debug-vqhxh" Feb 02 11:29:14 crc kubenswrapper[4838]: I0202 11:29:14.518008 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2aa51fa-d715-477a-9caa-f2c6757666ed" path="/var/lib/kubelet/pods/f2aa51fa-d715-477a-9caa-f2c6757666ed/volumes" Feb 02 11:29:14 crc kubenswrapper[4838]: I0202 11:29:14.711440 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wjz54/crc-debug-c9qd6"] Feb 02 11:29:14 crc kubenswrapper[4838]: E0202 11:29:14.711963 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2aa51fa-d715-477a-9caa-f2c6757666ed" containerName="container-00" Feb 02 11:29:14 crc kubenswrapper[4838]: I0202 11:29:14.711986 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2aa51fa-d715-477a-9caa-f2c6757666ed" containerName="container-00" Feb 02 11:29:14 crc kubenswrapper[4838]: I0202 11:29:14.712186 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2aa51fa-d715-477a-9caa-f2c6757666ed" containerName="container-00" Feb 02 11:29:14 crc kubenswrapper[4838]: I0202 11:29:14.712842 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wjz54/crc-debug-c9qd6" Feb 02 11:29:14 crc kubenswrapper[4838]: I0202 11:29:14.793870 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hv6l6\" (UniqueName: \"kubernetes.io/projected/87afdcf6-37a3-4c74-9cb5-ea3e36c83677-kube-api-access-hv6l6\") pod \"crc-debug-c9qd6\" (UID: \"87afdcf6-37a3-4c74-9cb5-ea3e36c83677\") " pod="openshift-must-gather-wjz54/crc-debug-c9qd6" Feb 02 11:29:14 crc kubenswrapper[4838]: I0202 11:29:14.794030 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/87afdcf6-37a3-4c74-9cb5-ea3e36c83677-host\") pod \"crc-debug-c9qd6\" (UID: \"87afdcf6-37a3-4c74-9cb5-ea3e36c83677\") " pod="openshift-must-gather-wjz54/crc-debug-c9qd6" Feb 02 11:29:14 crc kubenswrapper[4838]: I0202 11:29:14.896261 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/87afdcf6-37a3-4c74-9cb5-ea3e36c83677-host\") pod \"crc-debug-c9qd6\" (UID: \"87afdcf6-37a3-4c74-9cb5-ea3e36c83677\") " pod="openshift-must-gather-wjz54/crc-debug-c9qd6" Feb 02 11:29:14 crc kubenswrapper[4838]: I0202 11:29:14.896405 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/87afdcf6-37a3-4c74-9cb5-ea3e36c83677-host\") pod \"crc-debug-c9qd6\" (UID: \"87afdcf6-37a3-4c74-9cb5-ea3e36c83677\") " pod="openshift-must-gather-wjz54/crc-debug-c9qd6" Feb 02 11:29:14 crc kubenswrapper[4838]: I0202 11:29:14.896416 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hv6l6\" (UniqueName: \"kubernetes.io/projected/87afdcf6-37a3-4c74-9cb5-ea3e36c83677-kube-api-access-hv6l6\") pod \"crc-debug-c9qd6\" (UID: \"87afdcf6-37a3-4c74-9cb5-ea3e36c83677\") " pod="openshift-must-gather-wjz54/crc-debug-c9qd6" Feb 02 11:29:14 crc kubenswrapper[4838]: I0202 11:29:14.916074 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hv6l6\" (UniqueName: \"kubernetes.io/projected/87afdcf6-37a3-4c74-9cb5-ea3e36c83677-kube-api-access-hv6l6\") pod \"crc-debug-c9qd6\" (UID: \"87afdcf6-37a3-4c74-9cb5-ea3e36c83677\") " pod="openshift-must-gather-wjz54/crc-debug-c9qd6" Feb 02 11:29:15 crc kubenswrapper[4838]: I0202 11:29:15.029764 4838 util.go:30] "No sandbox for pod can be found. 
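The crc-debug pods above mount the node filesystem through a host-path volume named "host" plus a projected token. A sketch of the volume declaration using the Kubernetes API types; the "/" path and /host mount point are assumptions based on how oc debug node pods usually work, not values from this log:

// hostvolume.go - reconstructs the "host" volume implied by the
// kubernetes.io/host-path entries above.
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	hostVol := corev1.Volume{
		Name: "host",
		VolumeSource: corev1.VolumeSource{
			HostPath: &corev1.HostPathVolumeSource{Path: "/"}, // assumed root of the node
		},
	}
	mount := corev1.VolumeMount{Name: "host", MountPath: "/host"} // assumed mount point
	fmt.Println(hostVol.Name, "->", mount.MountPath)
}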
Feb 02 11:29:15 crc kubenswrapper[4838]: I0202 11:29:15.029764 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wjz54/crc-debug-c9qd6"
Feb 02 11:29:15 crc kubenswrapper[4838]: I0202 11:29:15.388959 4838 generic.go:334] "Generic (PLEG): container finished" podID="87afdcf6-37a3-4c74-9cb5-ea3e36c83677" containerID="2ddab62bae1ba19a68f8b30b776f56110e5b7e8aebdf38905b71ba8708109810" exitCode=0
Feb 02 11:29:15 crc kubenswrapper[4838]: I0202 11:29:15.389058 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wjz54/crc-debug-c9qd6" event={"ID":"87afdcf6-37a3-4c74-9cb5-ea3e36c83677","Type":"ContainerDied","Data":"2ddab62bae1ba19a68f8b30b776f56110e5b7e8aebdf38905b71ba8708109810"}
Feb 02 11:29:15 crc kubenswrapper[4838]: I0202 11:29:15.389313 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wjz54/crc-debug-c9qd6" event={"ID":"87afdcf6-37a3-4c74-9cb5-ea3e36c83677","Type":"ContainerStarted","Data":"c3ca09e58f5ac1783f0f33268a96a6d7ae4199594f4ed0fb876f783625a75ffb"}
Feb 02 11:29:15 crc kubenswrapper[4838]: I0202 11:29:15.429350 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 02 11:29:15 crc kubenswrapper[4838]: I0202 11:29:15.429412 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 11:29:15 crc kubenswrapper[4838]: I0202 11:29:15.429471 4838 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv"
Feb 02 11:29:15 crc kubenswrapper[4838]: I0202 11:29:15.430381 4838 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"175ffe8c2785ccb5a4bb5ad4bb28adaa432562c8282f7529b47cf5fe3050f259"} pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 02 11:29:15 crc kubenswrapper[4838]: I0202 11:29:15.430461 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" containerID="cri-o://175ffe8c2785ccb5a4bb5ad4bb28adaa432562c8282f7529b47cf5fe3050f259" gracePeriod=600
Feb 02 11:29:15 crc kubenswrapper[4838]: I0202 11:29:15.825415 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wjz54/crc-debug-c9qd6"]
Feb 02 11:29:16 crc kubenswrapper[4838]: I0202 11:29:15.833411 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wjz54/crc-debug-c9qd6"]
Feb 02 11:29:16 crc kubenswrapper[4838]: I0202 11:29:16.400120 4838 generic.go:334] "Generic (PLEG): container finished" podID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerID="175ffe8c2785ccb5a4bb5ad4bb28adaa432562c8282f7529b47cf5fe3050f259" exitCode=0
Feb 02 11:29:16 crc kubenswrapper[4838]: I0202 11:29:16.400204 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" event={"ID":"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce","Type":"ContainerDied","Data":"175ffe8c2785ccb5a4bb5ad4bb28adaa432562c8282f7529b47cf5fe3050f259"}
Feb 02 11:29:16 crc kubenswrapper[4838]: I0202 11:29:16.400519 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" event={"ID":"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce","Type":"ContainerStarted","Data":"fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980"}
Feb 02 11:29:16 crc kubenswrapper[4838]: I0202 11:29:16.400538 4838 scope.go:117] "RemoveContainer" containerID="0eb2144ffe05e7140a6ef2e53d77634749d4b12e36a535d1284618b8a96131e9"
Feb 02 11:29:16 crc kubenswrapper[4838]: I0202 11:29:16.529532 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wjz54/crc-debug-c9qd6"
Feb 02 11:29:16 crc kubenswrapper[4838]: I0202 11:29:16.630120 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/87afdcf6-37a3-4c74-9cb5-ea3e36c83677-host\") pod \"87afdcf6-37a3-4c74-9cb5-ea3e36c83677\" (UID: \"87afdcf6-37a3-4c74-9cb5-ea3e36c83677\") "
Feb 02 11:29:16 crc kubenswrapper[4838]: I0202 11:29:16.630200 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/87afdcf6-37a3-4c74-9cb5-ea3e36c83677-host" (OuterVolumeSpecName: "host") pod "87afdcf6-37a3-4c74-9cb5-ea3e36c83677" (UID: "87afdcf6-37a3-4c74-9cb5-ea3e36c83677"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 02 11:29:16 crc kubenswrapper[4838]: I0202 11:29:16.630212 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hv6l6\" (UniqueName: \"kubernetes.io/projected/87afdcf6-37a3-4c74-9cb5-ea3e36c83677-kube-api-access-hv6l6\") pod \"87afdcf6-37a3-4c74-9cb5-ea3e36c83677\" (UID: \"87afdcf6-37a3-4c74-9cb5-ea3e36c83677\") "
Feb 02 11:29:16 crc kubenswrapper[4838]: I0202 11:29:16.631255 4838 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/87afdcf6-37a3-4c74-9cb5-ea3e36c83677-host\") on node \"crc\" DevicePath \"\""
Feb 02 11:29:16 crc kubenswrapper[4838]: I0202 11:29:16.635829 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87afdcf6-37a3-4c74-9cb5-ea3e36c83677-kube-api-access-hv6l6" (OuterVolumeSpecName: "kube-api-access-hv6l6") pod "87afdcf6-37a3-4c74-9cb5-ea3e36c83677" (UID: "87afdcf6-37a3-4c74-9cb5-ea3e36c83677"). InnerVolumeSpecName "kube-api-access-hv6l6". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:29:16 crc kubenswrapper[4838]: I0202 11:29:16.732502 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hv6l6\" (UniqueName: \"kubernetes.io/projected/87afdcf6-37a3-4c74-9cb5-ea3e36c83677-kube-api-access-hv6l6\") on node \"crc\" DevicePath \"\"" Feb 02 11:29:17 crc kubenswrapper[4838]: I0202 11:29:17.302524 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wjz54/crc-debug-glp7r"] Feb 02 11:29:17 crc kubenswrapper[4838]: E0202 11:29:17.302917 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87afdcf6-37a3-4c74-9cb5-ea3e36c83677" containerName="container-00" Feb 02 11:29:17 crc kubenswrapper[4838]: I0202 11:29:17.302931 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="87afdcf6-37a3-4c74-9cb5-ea3e36c83677" containerName="container-00" Feb 02 11:29:17 crc kubenswrapper[4838]: I0202 11:29:17.303111 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="87afdcf6-37a3-4c74-9cb5-ea3e36c83677" containerName="container-00" Feb 02 11:29:17 crc kubenswrapper[4838]: I0202 11:29:17.303821 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wjz54/crc-debug-glp7r" Feb 02 11:29:17 crc kubenswrapper[4838]: I0202 11:29:17.344497 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3a7490c6-f070-4f41-bb03-601c411dc7ee-host\") pod \"crc-debug-glp7r\" (UID: \"3a7490c6-f070-4f41-bb03-601c411dc7ee\") " pod="openshift-must-gather-wjz54/crc-debug-glp7r" Feb 02 11:29:17 crc kubenswrapper[4838]: I0202 11:29:17.344559 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjv97\" (UniqueName: \"kubernetes.io/projected/3a7490c6-f070-4f41-bb03-601c411dc7ee-kube-api-access-hjv97\") pod \"crc-debug-glp7r\" (UID: \"3a7490c6-f070-4f41-bb03-601c411dc7ee\") " pod="openshift-must-gather-wjz54/crc-debug-glp7r" Feb 02 11:29:17 crc kubenswrapper[4838]: I0202 11:29:17.410245 4838 scope.go:117] "RemoveContainer" containerID="2ddab62bae1ba19a68f8b30b776f56110e5b7e8aebdf38905b71ba8708109810" Feb 02 11:29:17 crc kubenswrapper[4838]: I0202 11:29:17.410259 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wjz54/crc-debug-c9qd6" Feb 02 11:29:17 crc kubenswrapper[4838]: I0202 11:29:17.446418 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3a7490c6-f070-4f41-bb03-601c411dc7ee-host\") pod \"crc-debug-glp7r\" (UID: \"3a7490c6-f070-4f41-bb03-601c411dc7ee\") " pod="openshift-must-gather-wjz54/crc-debug-glp7r" Feb 02 11:29:17 crc kubenswrapper[4838]: I0202 11:29:17.446754 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjv97\" (UniqueName: \"kubernetes.io/projected/3a7490c6-f070-4f41-bb03-601c411dc7ee-kube-api-access-hjv97\") pod \"crc-debug-glp7r\" (UID: \"3a7490c6-f070-4f41-bb03-601c411dc7ee\") " pod="openshift-must-gather-wjz54/crc-debug-glp7r" Feb 02 11:29:17 crc kubenswrapper[4838]: I0202 11:29:17.446999 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3a7490c6-f070-4f41-bb03-601c411dc7ee-host\") pod \"crc-debug-glp7r\" (UID: \"3a7490c6-f070-4f41-bb03-601c411dc7ee\") " pod="openshift-must-gather-wjz54/crc-debug-glp7r" Feb 02 11:29:17 crc kubenswrapper[4838]: I0202 11:29:17.469404 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjv97\" (UniqueName: \"kubernetes.io/projected/3a7490c6-f070-4f41-bb03-601c411dc7ee-kube-api-access-hjv97\") pod \"crc-debug-glp7r\" (UID: \"3a7490c6-f070-4f41-bb03-601c411dc7ee\") " pod="openshift-must-gather-wjz54/crc-debug-glp7r" Feb 02 11:29:17 crc kubenswrapper[4838]: I0202 11:29:17.624819 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wjz54/crc-debug-glp7r" Feb 02 11:29:17 crc kubenswrapper[4838]: W0202 11:29:17.674084 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3a7490c6_f070_4f41_bb03_601c411dc7ee.slice/crio-aeb2bf8a884d55f333ded7b841713eb27a23c4b112e1fb67475c97f610320f76 WatchSource:0}: Error finding container aeb2bf8a884d55f333ded7b841713eb27a23c4b112e1fb67475c97f610320f76: Status 404 returned error can't find the container with id aeb2bf8a884d55f333ded7b841713eb27a23c4b112e1fb67475c97f610320f76 Feb 02 11:29:18 crc kubenswrapper[4838]: E0202 11:29:18.056053 4838 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3a7490c6_f070_4f41_bb03_601c411dc7ee.slice/crio-08af24d3563f035026741653a643be9e62bf207408f9c9b5f4a93e048af50c85.scope\": RecentStats: unable to find data in memory cache]" Feb 02 11:29:18 crc kubenswrapper[4838]: I0202 11:29:18.429813 4838 generic.go:334] "Generic (PLEG): container finished" podID="3a7490c6-f070-4f41-bb03-601c411dc7ee" containerID="08af24d3563f035026741653a643be9e62bf207408f9c9b5f4a93e048af50c85" exitCode=0 Feb 02 11:29:18 crc kubenswrapper[4838]: I0202 11:29:18.429989 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wjz54/crc-debug-glp7r" event={"ID":"3a7490c6-f070-4f41-bb03-601c411dc7ee","Type":"ContainerDied","Data":"08af24d3563f035026741653a643be9e62bf207408f9c9b5f4a93e048af50c85"} Feb 02 11:29:18 crc kubenswrapper[4838]: I0202 11:29:18.430182 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wjz54/crc-debug-glp7r" 
event={"ID":"3a7490c6-f070-4f41-bb03-601c411dc7ee","Type":"ContainerStarted","Data":"aeb2bf8a884d55f333ded7b841713eb27a23c4b112e1fb67475c97f610320f76"} Feb 02 11:29:18 crc kubenswrapper[4838]: I0202 11:29:18.467387 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wjz54/crc-debug-glp7r"] Feb 02 11:29:18 crc kubenswrapper[4838]: I0202 11:29:18.475258 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wjz54/crc-debug-glp7r"] Feb 02 11:29:18 crc kubenswrapper[4838]: I0202 11:29:18.521432 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87afdcf6-37a3-4c74-9cb5-ea3e36c83677" path="/var/lib/kubelet/pods/87afdcf6-37a3-4c74-9cb5-ea3e36c83677/volumes" Feb 02 11:29:19 crc kubenswrapper[4838]: I0202 11:29:19.550822 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wjz54/crc-debug-glp7r" Feb 02 11:29:19 crc kubenswrapper[4838]: I0202 11:29:19.586307 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjv97\" (UniqueName: \"kubernetes.io/projected/3a7490c6-f070-4f41-bb03-601c411dc7ee-kube-api-access-hjv97\") pod \"3a7490c6-f070-4f41-bb03-601c411dc7ee\" (UID: \"3a7490c6-f070-4f41-bb03-601c411dc7ee\") " Feb 02 11:29:19 crc kubenswrapper[4838]: I0202 11:29:19.586384 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3a7490c6-f070-4f41-bb03-601c411dc7ee-host\") pod \"3a7490c6-f070-4f41-bb03-601c411dc7ee\" (UID: \"3a7490c6-f070-4f41-bb03-601c411dc7ee\") " Feb 02 11:29:19 crc kubenswrapper[4838]: I0202 11:29:19.587585 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3a7490c6-f070-4f41-bb03-601c411dc7ee-host" (OuterVolumeSpecName: "host") pod "3a7490c6-f070-4f41-bb03-601c411dc7ee" (UID: "3a7490c6-f070-4f41-bb03-601c411dc7ee"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 11:29:19 crc kubenswrapper[4838]: I0202 11:29:19.593936 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a7490c6-f070-4f41-bb03-601c411dc7ee-kube-api-access-hjv97" (OuterVolumeSpecName: "kube-api-access-hjv97") pod "3a7490c6-f070-4f41-bb03-601c411dc7ee" (UID: "3a7490c6-f070-4f41-bb03-601c411dc7ee"). InnerVolumeSpecName "kube-api-access-hjv97". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:29:19 crc kubenswrapper[4838]: I0202 11:29:19.688321 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjv97\" (UniqueName: \"kubernetes.io/projected/3a7490c6-f070-4f41-bb03-601c411dc7ee-kube-api-access-hjv97\") on node \"crc\" DevicePath \"\"" Feb 02 11:29:19 crc kubenswrapper[4838]: I0202 11:29:19.688355 4838 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3a7490c6-f070-4f41-bb03-601c411dc7ee-host\") on node \"crc\" DevicePath \"\"" Feb 02 11:29:20 crc kubenswrapper[4838]: I0202 11:29:20.446740 4838 scope.go:117] "RemoveContainer" containerID="08af24d3563f035026741653a643be9e62bf207408f9c9b5f4a93e048af50c85" Feb 02 11:29:20 crc kubenswrapper[4838]: I0202 11:29:20.446940 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wjz54/crc-debug-glp7r" Feb 02 11:29:20 crc kubenswrapper[4838]: I0202 11:29:20.516502 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a7490c6-f070-4f41-bb03-601c411dc7ee" path="/var/lib/kubelet/pods/3a7490c6-f070-4f41-bb03-601c411dc7ee/volumes" Feb 02 11:29:28 crc kubenswrapper[4838]: I0202 11:29:28.035239 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-2d8xd"] Feb 02 11:29:28 crc kubenswrapper[4838]: I0202 11:29:28.043468 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-2d8xd"] Feb 02 11:29:28 crc kubenswrapper[4838]: I0202 11:29:28.519991 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f9f491b-916e-4236-96b5-8da53babac04" path="/var/lib/kubelet/pods/9f9f491b-916e-4236-96b5-8da53babac04/volumes" Feb 02 11:29:30 crc kubenswrapper[4838]: I0202 11:29:30.042455 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-m9rhm"] Feb 02 11:29:30 crc kubenswrapper[4838]: I0202 11:29:30.055673 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-m9rhm"] Feb 02 11:29:30 crc kubenswrapper[4838]: I0202 11:29:30.516786 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc01012d-a1bc-4849-8cc3-c0b3fc3f5504" path="/var/lib/kubelet/pods/dc01012d-a1bc-4849-8cc3-c0b3fc3f5504/volumes" Feb 02 11:29:34 crc kubenswrapper[4838]: I0202 11:29:34.864467 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-859b6c8866-ltwgg_a08effe2-908e-4666-8f0d-2348c91376cf/barbican-api/0.log" Feb 02 11:29:35 crc kubenswrapper[4838]: I0202 11:29:35.073451 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-859b6c8866-ltwgg_a08effe2-908e-4666-8f0d-2348c91376cf/barbican-api-log/0.log" Feb 02 11:29:35 crc kubenswrapper[4838]: I0202 11:29:35.120856 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6c7fd57b7d-nvvkt_4001ac84-f8ab-436b-b526-23940d7f0463/barbican-keystone-listener/0.log" Feb 02 11:29:35 crc kubenswrapper[4838]: I0202 11:29:35.246961 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6c7fd57b7d-nvvkt_4001ac84-f8ab-436b-b526-23940d7f0463/barbican-keystone-listener-log/0.log" Feb 02 11:29:35 crc kubenswrapper[4838]: I0202 11:29:35.363558 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5545c69c4f-jpg4z_ba0aafb6-d810-4b44-8e8a-eebc89abad94/barbican-worker-log/0.log" Feb 02 11:29:35 crc kubenswrapper[4838]: I0202 11:29:35.369976 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5545c69c4f-jpg4z_ba0aafb6-d810-4b44-8e8a-eebc89abad94/barbican-worker/0.log" Feb 02 11:29:35 crc kubenswrapper[4838]: I0202 11:29:35.555397 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_b00c3f84-6034-4c0b-ad6c-52845d6743aa/ceilometer-notification-agent/0.log" Feb 02 11:29:35 crc kubenswrapper[4838]: I0202 11:29:35.562308 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_b00c3f84-6034-4c0b-ad6c-52845d6743aa/ceilometer-central-agent/0.log" Feb 02 11:29:35 crc kubenswrapper[4838]: I0202 11:29:35.582907 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_b00c3f84-6034-4c0b-ad6c-52845d6743aa/proxy-httpd/0.log" Feb 02 
Feb 02 11:29:35 crc kubenswrapper[4838]: I0202 11:29:35.752099 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_b00c3f84-6034-4c0b-ad6c-52845d6743aa/sg-core/0.log"
Feb 02 11:29:35 crc kubenswrapper[4838]: I0202 11:29:35.771967 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_0189f310-62ee-4f4d-b618-5afac393ff30/cinder-api/0.log"
Feb 02 11:29:35 crc kubenswrapper[4838]: I0202 11:29:35.793408 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_0189f310-62ee-4f4d-b618-5afac393ff30/cinder-api-log/0.log"
Feb 02 11:29:35 crc kubenswrapper[4838]: I0202 11:29:35.978197 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_2c000131-c578-473f-8758-95ae23e12d3a/probe/0.log"
Feb 02 11:29:35 crc kubenswrapper[4838]: I0202 11:29:35.993748 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_2c000131-c578-473f-8758-95ae23e12d3a/cinder-scheduler/0.log"
Feb 02 11:29:36 crc kubenswrapper[4838]: I0202 11:29:36.186524 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-89c5cd4d5-bjxmv_2a16a8c7-7667-401d-93aa-d0209c7c6ea7/init/0.log"
Feb 02 11:29:36 crc kubenswrapper[4838]: I0202 11:29:36.336137 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-89c5cd4d5-bjxmv_2a16a8c7-7667-401d-93aa-d0209c7c6ea7/init/0.log"
Feb 02 11:29:36 crc kubenswrapper[4838]: I0202 11:29:36.360817 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_5110c446-0e66-4098-b30a-dfbdbc8e5fbe/glance-httpd/0.log"
Feb 02 11:29:36 crc kubenswrapper[4838]: I0202 11:29:36.378034 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-89c5cd4d5-bjxmv_2a16a8c7-7667-401d-93aa-d0209c7c6ea7/dnsmasq-dns/0.log"
Feb 02 11:29:36 crc kubenswrapper[4838]: I0202 11:29:36.542043 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_5110c446-0e66-4098-b30a-dfbdbc8e5fbe/glance-log/0.log"
Feb 02 11:29:36 crc kubenswrapper[4838]: I0202 11:29:36.573203 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_8824c6f8-8e4e-436f-a4c5-755ac38d0979/glance-log/0.log"
Feb 02 11:29:36 crc kubenswrapper[4838]: I0202 11:29:36.610680 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_8824c6f8-8e4e-436f-a4c5-755ac38d0979/glance-httpd/0.log"
Feb 02 11:29:36 crc kubenswrapper[4838]: I0202 11:29:36.754384 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-68776656b4-wsgxk_e6281d3a-f605-47fd-a334-f5d814a86d4f/init/0.log"
Feb 02 11:29:36 crc kubenswrapper[4838]: I0202 11:29:36.978990 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-68776656b4-wsgxk_e6281d3a-f605-47fd-a334-f5d814a86d4f/init/0.log"
Feb 02 11:29:37 crc kubenswrapper[4838]: I0202 11:29:37.057752 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-68776656b4-wsgxk_e6281d3a-f605-47fd-a334-f5d814a86d4f/ironic-api-log/0.log"
Feb 02 11:29:37 crc kubenswrapper[4838]: I0202 11:29:37.142823 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-68776656b4-wsgxk_e6281d3a-f605-47fd-a334-f5d814a86d4f/ironic-api/0.log"
Feb 02 11:29:37 crc kubenswrapper[4838]: I0202 11:29:37.199180 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/init/0.log"
Feb 02 11:29:37 crc kubenswrapper[4838]: I0202 11:29:37.410837 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/ironic-python-agent-init/0.log"
Feb 02 11:29:37 crc kubenswrapper[4838]: I0202 11:29:37.430718 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/init/0.log"
Feb 02 11:29:37 crc kubenswrapper[4838]: I0202 11:29:37.491878 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/ironic-python-agent-init/0.log"
Feb 02 11:29:37 crc kubenswrapper[4838]: I0202 11:29:37.668473 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/ironic-python-agent-init/0.log"
Feb 02 11:29:37 crc kubenswrapper[4838]: I0202 11:29:37.673838 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/init/0.log"
Feb 02 11:29:38 crc kubenswrapper[4838]: I0202 11:29:38.021141 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/init/0.log"
Feb 02 11:29:38 crc kubenswrapper[4838]: I0202 11:29:38.260902 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/ironic-python-agent-init/0.log"
Feb 02 11:29:38 crc kubenswrapper[4838]: I0202 11:29:38.384077 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/pxe-init/0.log"
Feb 02 11:29:38 crc kubenswrapper[4838]: I0202 11:29:38.596490 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/httpboot/0.log"
Feb 02 11:29:38 crc kubenswrapper[4838]: I0202 11:29:38.846561 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/ironic-conductor/0.log"
Feb 02 11:29:38 crc kubenswrapper[4838]: I0202 11:29:38.961736 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/pxe-init/0.log"
Feb 02 11:29:39 crc kubenswrapper[4838]: I0202 11:29:39.064012 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/ramdisk-logs/0.log"
Feb 02 11:29:39 crc kubenswrapper[4838]: I0202 11:29:39.131513 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/pxe-init/0.log"
Feb 02 11:29:39 crc kubenswrapper[4838]: I0202 11:29:39.188581 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-db-sync-z8sb2_04bf896a-e964-48a2-900e-44362394a6ac/init/0.log"
Feb 02 11:29:39 crc kubenswrapper[4838]: I0202 11:29:39.407902 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_fe57cd8a-a524-426c-a2f4-401cd5642248/ironic-python-agent-init/0.log"
Feb 02 11:29:39 crc kubenswrapper[4838]: I0202 11:29:39.415460 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/pxe-init/0.log"
Feb 02 11:29:39 crc kubenswrapper[4838]: I0202 11:29:39.418706 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-db-sync-z8sb2_04bf896a-e964-48a2-900e-44362394a6ac/init/0.log"
Feb 02 11:29:39 crc kubenswrapper[4838]: I0202 11:29:39.426963 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-db-sync-z8sb2_04bf896a-e964-48a2-900e-44362394a6ac/ironic-db-sync/0.log"
Feb 02 11:29:39 crc kubenswrapper[4838]: I0202 11:29:39.641374 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_fe57cd8a-a524-426c-a2f4-401cd5642248/ironic-python-agent-init/0.log"
Feb 02 11:29:39 crc kubenswrapper[4838]: I0202 11:29:39.659354 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_fe57cd8a-a524-426c-a2f4-401cd5642248/inspector-pxe-init/0.log"
Feb 02 11:29:39 crc kubenswrapper[4838]: I0202 11:29:39.660933 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_fe57cd8a-a524-426c-a2f4-401cd5642248/inspector-pxe-init/0.log"
Feb 02 11:29:39 crc kubenswrapper[4838]: I0202 11:29:39.827432 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_fe57cd8a-a524-426c-a2f4-401cd5642248/inspector-pxe-init/0.log"
Feb 02 11:29:39 crc kubenswrapper[4838]: I0202 11:29:39.833231 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_fe57cd8a-a524-426c-a2f4-401cd5642248/inspector-httpboot/0.log"
Feb 02 11:29:39 crc kubenswrapper[4838]: I0202 11:29:39.853314 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_fe57cd8a-a524-426c-a2f4-401cd5642248/ironic-python-agent-init/0.log"
Feb 02 11:29:39 crc kubenswrapper[4838]: I0202 11:29:39.874919 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_fe57cd8a-a524-426c-a2f4-401cd5642248/ironic-inspector-httpd/0.log"
Feb 02 11:29:39 crc kubenswrapper[4838]: I0202 11:29:39.879228 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_fe57cd8a-a524-426c-a2f4-401cd5642248/ironic-inspector/0.log"
Feb 02 11:29:40 crc kubenswrapper[4838]: I0202 11:29:40.003231 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_fe57cd8a-a524-426c-a2f4-401cd5642248/ramdisk-logs/0.log"
Feb 02 11:29:40 crc kubenswrapper[4838]: I0202 11:29:40.045497 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-db-sync-s47dm_1a115a1d-336b-4c0d-81c4-3ce5c52b05a5/ironic-inspector-db-sync/0.log"
Feb 02 11:29:40 crc kubenswrapper[4838]: I0202 11:29:40.085376 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-neutron-agent-9c5f849b9-h2frc_cb80ba2d-57e2-4a6d-95cc-e67af228cb54/ironic-neutron-agent/0.log"
Feb 02 11:29:40 crc kubenswrapper[4838]: I0202 11:29:40.282442 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_253cbeb6-ec5a-404a-904c-d06b377ed987/kube-state-metrics/0.log"
Feb 02 11:29:40 crc kubenswrapper[4838]: I0202 11:29:40.372353 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-554c88cfc6-svbvn_04f65e34-3c92-4288-86f4-cfc67c46de23/keystone-api/0.log"
Feb 02 11:29:40 crc kubenswrapper[4838]: I0202 11:29:40.562328 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-595df946b7-5b7qm_de9fd7ce-936c-460d-b33b-e9b089a7d495/neutron-httpd/0.log"
Feb 02 11:29:40 crc kubenswrapper[4838]: I0202 11:29:40.632594 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-595df946b7-5b7qm_de9fd7ce-936c-460d-b33b-e9b089a7d495/neutron-api/0.log"
Feb 02 11:29:40 crc kubenswrapper[4838]: I0202 11:29:40.928133 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_b3dc8cbc-2c9c-4192-99e3-7724d3c28c68/nova-api-log/0.log"
Feb 02 11:29:40 crc kubenswrapper[4838]: I0202 11:29:40.987451 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_b3dc8cbc-2c9c-4192-99e3-7724d3c28c68/nova-api-api/0.log"
Feb 02 11:29:41 crc kubenswrapper[4838]: I0202 11:29:41.012057 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_29a3c9fb-a43f-4867-94e9-dd205f0fb517/nova-cell0-conductor-conductor/0.log"
Feb 02 11:29:41 crc kubenswrapper[4838]: I0202 11:29:41.140193 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-cell-mapping-9ln9l_b6ecc1cc-2175-4372-ae32-61761f66a342/nova-manage/0.log"
Feb 02 11:29:41 crc kubenswrapper[4838]: I0202 11:29:41.329949 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_966289ba-cb66-4cf4-adff-45ac19b18add/nova-cell1-conductor-conductor/0.log"
Feb 02 11:29:41 crc kubenswrapper[4838]: I0202 11:29:41.335134 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-db-sync-5tpqb_7ce26605-8dfc-48cd-a362-1a37c67ea300/nova-cell1-conductor-db-sync/0.log"
Feb 02 11:29:41 crc kubenswrapper[4838]: I0202 11:29:41.676249 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_7e38e111-e96a-4196-84d2-9f6f2cd192dc/nova-cell1-novncproxy-novncproxy/0.log"
Feb 02 11:29:41 crc kubenswrapper[4838]: I0202 11:29:41.678508 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_b6d9b193-d35a-40e7-87da-b20cfaca82b4/nova-metadata-log/0.log"
Feb 02 11:29:41 crc kubenswrapper[4838]: I0202 11:29:41.951914 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_1f864d41-d3ba-4025-9ffc-d60bb52a18b1/nova-scheduler-scheduler/0.log"
Feb 02 11:29:41 crc kubenswrapper[4838]: I0202 11:29:41.954736 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_b6d9b193-d35a-40e7-87da-b20cfaca82b4/nova-metadata-metadata/0.log"
Feb 02 11:29:42 crc kubenswrapper[4838]: I0202 11:29:42.113512 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_2f841e0c-b40f-4dd1-8427-ea07840bcdf6/mysql-bootstrap/0.log"
Feb 02 11:29:42 crc kubenswrapper[4838]: I0202 11:29:42.323130 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5b42feb4-a718-4036-be9e-3113b97680c4/mysql-bootstrap/0.log"
Feb 02 11:29:42 crc kubenswrapper[4838]: I0202 11:29:42.328759 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_2f841e0c-b40f-4dd1-8427-ea07840bcdf6/mysql-bootstrap/0.log"
Feb 02 11:29:42 crc kubenswrapper[4838]: I0202 11:29:42.366675 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_2f841e0c-b40f-4dd1-8427-ea07840bcdf6/galera/0.log"
Feb 02 11:29:42 crc kubenswrapper[4838]: I0202 11:29:42.516927 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5b42feb4-a718-4036-be9e-3113b97680c4/mysql-bootstrap/0.log"
Feb 02 11:29:42 crc kubenswrapper[4838]: I0202 11:29:42.590156 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406/openstackclient/0.log"
Feb 02 11:29:42 crc kubenswrapper[4838]: I0202 11:29:42.620515 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5b42feb4-a718-4036-be9e-3113b97680c4/galera/0.log"
Feb 02 11:29:42 crc kubenswrapper[4838]: I0202 11:29:42.814981 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-78llm_89d377c3-4929-47c4-abc7-53bb5e058025/ovn-controller/0.log"
Feb 02 11:29:42 crc kubenswrapper[4838]: I0202 11:29:42.830528 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-p2lkw_1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e/openstack-network-exporter/0.log"
Feb 02 11:29:43 crc kubenswrapper[4838]: I0202 11:29:43.013676 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-gmk9d_6201c1a8-a058-4029-ac96-17f4500b9fc0/ovsdb-server-init/0.log"
Feb 02 11:29:43 crc kubenswrapper[4838]: I0202 11:29:43.232298 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-gmk9d_6201c1a8-a058-4029-ac96-17f4500b9fc0/ovs-vswitchd/0.log"
Feb 02 11:29:43 crc kubenswrapper[4838]: I0202 11:29:43.236609 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-gmk9d_6201c1a8-a058-4029-ac96-17f4500b9fc0/ovsdb-server-init/0.log"
Feb 02 11:29:43 crc kubenswrapper[4838]: I0202 11:29:43.274347 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-gmk9d_6201c1a8-a058-4029-ac96-17f4500b9fc0/ovsdb-server/0.log"
Feb 02 11:29:43 crc kubenswrapper[4838]: I0202 11:29:43.434716 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_6e6a9dbb-63ef-4cf2-b725-254ad752937d/openstack-network-exporter/0.log"
Feb 02 11:29:43 crc kubenswrapper[4838]: I0202 11:29:43.481329 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_6e6a9dbb-63ef-4cf2-b725-254ad752937d/ovn-northd/0.log"
Feb 02 11:29:43 crc kubenswrapper[4838]: I0202 11:29:43.503732 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_bbb8b7a6-28d0-40fa-bdcb-fe95357c8018/openstack-network-exporter/0.log"
Feb 02 11:29:43 crc kubenswrapper[4838]: I0202 11:29:43.668820 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_bbb8b7a6-28d0-40fa-bdcb-fe95357c8018/ovsdbserver-nb/0.log"
Feb 02 11:29:43 crc kubenswrapper[4838]: I0202 11:29:43.691652 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_a8af70b4-152f-4edb-a4c5-afc8baed3685/openstack-network-exporter/0.log"
Feb 02 11:29:43 crc kubenswrapper[4838]: I0202 11:29:43.722598 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_a8af70b4-152f-4edb-a4c5-afc8baed3685/ovsdbserver-sb/0.log"
Feb 02 11:29:43 crc kubenswrapper[4838]: I0202 11:29:43.876026 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-b4ffff5fb-46ldv_32a23a44-9dae-46da-9895-dcd512447d9c/placement-api/0.log"
Feb 02 11:29:43 crc kubenswrapper[4838]: I0202 11:29:43.970493 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-b4ffff5fb-46ldv_32a23a44-9dae-46da-9895-dcd512447d9c/placement-log/0.log"
Feb 02 11:29:44 crc kubenswrapper[4838]: I0202 11:29:44.100765 4838 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_a21f4822-c0c1-4b73-bad3-ddf3552c9ebd/setup-container/0.log" Feb 02 11:29:44 crc kubenswrapper[4838]: I0202 11:29:44.274645 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_a21f4822-c0c1-4b73-bad3-ddf3552c9ebd/rabbitmq/0.log" Feb 02 11:29:44 crc kubenswrapper[4838]: I0202 11:29:44.321170 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c6516e19-8887-4dda-a635-bc93da2a19a6/setup-container/0.log" Feb 02 11:29:44 crc kubenswrapper[4838]: I0202 11:29:44.349634 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_a21f4822-c0c1-4b73-bad3-ddf3552c9ebd/setup-container/0.log" Feb 02 11:29:44 crc kubenswrapper[4838]: I0202 11:29:44.607371 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c6516e19-8887-4dda-a635-bc93da2a19a6/setup-container/0.log" Feb 02 11:29:44 crc kubenswrapper[4838]: I0202 11:29:44.612463 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c6516e19-8887-4dda-a635-bc93da2a19a6/rabbitmq/0.log" Feb 02 11:29:44 crc kubenswrapper[4838]: I0202 11:29:44.752294 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-655dd9ff5-m4vn6_f5af71cb-2380-4977-9a44-ece13d4ce18a/proxy-httpd/0.log" Feb 02 11:29:44 crc kubenswrapper[4838]: I0202 11:29:44.946265 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-655dd9ff5-m4vn6_f5af71cb-2380-4977-9a44-ece13d4ce18a/proxy-server/0.log" Feb 02 11:29:44 crc kubenswrapper[4838]: I0202 11:29:44.998214 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-524zj_582a43e1-d21a-4421-ae28-0eecd147d19e/swift-ring-rebalance/0.log" Feb 02 11:29:45 crc kubenswrapper[4838]: I0202 11:29:45.263938 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/account-reaper/0.log" Feb 02 11:29:45 crc kubenswrapper[4838]: I0202 11:29:45.284350 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/account-auditor/0.log" Feb 02 11:29:45 crc kubenswrapper[4838]: I0202 11:29:45.322932 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/account-replicator/0.log" Feb 02 11:29:45 crc kubenswrapper[4838]: I0202 11:29:45.405461 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/account-server/0.log" Feb 02 11:29:45 crc kubenswrapper[4838]: I0202 11:29:45.454857 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/container-auditor/0.log" Feb 02 11:29:45 crc kubenswrapper[4838]: I0202 11:29:45.513439 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/container-replicator/0.log" Feb 02 11:29:45 crc kubenswrapper[4838]: I0202 11:29:45.546640 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/container-server/0.log" Feb 02 11:29:45 crc kubenswrapper[4838]: I0202 11:29:45.620205 4838 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/container-updater/0.log" Feb 02 11:29:45 crc kubenswrapper[4838]: I0202 11:29:45.671639 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/object-auditor/0.log" Feb 02 11:29:45 crc kubenswrapper[4838]: I0202 11:29:45.765974 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/object-replicator/0.log" Feb 02 11:29:45 crc kubenswrapper[4838]: I0202 11:29:45.769048 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/object-expirer/0.log" Feb 02 11:29:45 crc kubenswrapper[4838]: I0202 11:29:45.844838 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/object-server/0.log" Feb 02 11:29:45 crc kubenswrapper[4838]: I0202 11:29:45.881315 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/object-updater/0.log" Feb 02 11:29:45 crc kubenswrapper[4838]: I0202 11:29:45.988470 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/swift-recon-cron/0.log" Feb 02 11:29:46 crc kubenswrapper[4838]: I0202 11:29:46.014863 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/rsync/0.log" Feb 02 11:29:49 crc kubenswrapper[4838]: I0202 11:29:49.302164 4838 scope.go:117] "RemoveContainer" containerID="43c015336232612c23c346095e9d080a828e7b82c35111545a72241601c2a52f" Feb 02 11:29:49 crc kubenswrapper[4838]: I0202 11:29:49.356765 4838 scope.go:117] "RemoveContainer" containerID="1d5cbeee6e023cbfb8e09f59ee31635ff2d97ec33d9dd5982af2a2591a8392af" Feb 02 11:29:49 crc kubenswrapper[4838]: I0202 11:29:49.421557 4838 scope.go:117] "RemoveContainer" containerID="5d5e907869030a55ad318e1d933bff3c790413853b7dba2978c0d1386f6b4b5c" Feb 02 11:29:50 crc kubenswrapper[4838]: I0202 11:29:50.064016 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-5tpqb"] Feb 02 11:29:50 crc kubenswrapper[4838]: I0202 11:29:50.076699 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-5tpqb"] Feb 02 11:29:50 crc kubenswrapper[4838]: I0202 11:29:50.517766 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ce26605-8dfc-48cd-a362-1a37c67ea300" path="/var/lib/kubelet/pods/7ce26605-8dfc-48cd-a362-1a37c67ea300/volumes" Feb 02 11:29:52 crc kubenswrapper[4838]: I0202 11:29:52.802287 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_605eae6b-dbaf-4781-97bb-2ef09397141d/memcached/0.log" Feb 02 11:30:00 crc kubenswrapper[4838]: I0202 11:30:00.147081 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500530-m6dbf"] Feb 02 11:30:00 crc kubenswrapper[4838]: E0202 11:30:00.148069 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a7490c6-f070-4f41-bb03-601c411dc7ee" containerName="container-00" Feb 02 11:30:00 crc kubenswrapper[4838]: I0202 11:30:00.148086 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a7490c6-f070-4f41-bb03-601c411dc7ee" containerName="container-00" Feb 02 11:30:00 crc kubenswrapper[4838]: I0202 
11:30:00.148334 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a7490c6-f070-4f41-bb03-601c411dc7ee" containerName="container-00" Feb 02 11:30:00 crc kubenswrapper[4838]: I0202 11:30:00.149193 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500530-m6dbf" Feb 02 11:30:00 crc kubenswrapper[4838]: I0202 11:30:00.151085 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 02 11:30:00 crc kubenswrapper[4838]: I0202 11:30:00.151312 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 02 11:30:00 crc kubenswrapper[4838]: I0202 11:30:00.160768 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500530-m6dbf"] Feb 02 11:30:00 crc kubenswrapper[4838]: I0202 11:30:00.260380 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/479bad84-d9ca-4089-8bf3-2f1eb5310ead-config-volume\") pod \"collect-profiles-29500530-m6dbf\" (UID: \"479bad84-d9ca-4089-8bf3-2f1eb5310ead\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500530-m6dbf" Feb 02 11:30:00 crc kubenswrapper[4838]: I0202 11:30:00.260459 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltmtz\" (UniqueName: \"kubernetes.io/projected/479bad84-d9ca-4089-8bf3-2f1eb5310ead-kube-api-access-ltmtz\") pod \"collect-profiles-29500530-m6dbf\" (UID: \"479bad84-d9ca-4089-8bf3-2f1eb5310ead\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500530-m6dbf" Feb 02 11:30:00 crc kubenswrapper[4838]: I0202 11:30:00.260604 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/479bad84-d9ca-4089-8bf3-2f1eb5310ead-secret-volume\") pod \"collect-profiles-29500530-m6dbf\" (UID: \"479bad84-d9ca-4089-8bf3-2f1eb5310ead\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500530-m6dbf" Feb 02 11:30:00 crc kubenswrapper[4838]: I0202 11:30:00.362357 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/479bad84-d9ca-4089-8bf3-2f1eb5310ead-secret-volume\") pod \"collect-profiles-29500530-m6dbf\" (UID: \"479bad84-d9ca-4089-8bf3-2f1eb5310ead\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500530-m6dbf" Feb 02 11:30:00 crc kubenswrapper[4838]: I0202 11:30:00.362454 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/479bad84-d9ca-4089-8bf3-2f1eb5310ead-config-volume\") pod \"collect-profiles-29500530-m6dbf\" (UID: \"479bad84-d9ca-4089-8bf3-2f1eb5310ead\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500530-m6dbf" Feb 02 11:30:00 crc kubenswrapper[4838]: I0202 11:30:00.362495 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltmtz\" (UniqueName: \"kubernetes.io/projected/479bad84-d9ca-4089-8bf3-2f1eb5310ead-kube-api-access-ltmtz\") pod \"collect-profiles-29500530-m6dbf\" (UID: \"479bad84-d9ca-4089-8bf3-2f1eb5310ead\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29500530-m6dbf" Feb 02 11:30:00 crc kubenswrapper[4838]: I0202 11:30:00.363425 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/479bad84-d9ca-4089-8bf3-2f1eb5310ead-config-volume\") pod \"collect-profiles-29500530-m6dbf\" (UID: \"479bad84-d9ca-4089-8bf3-2f1eb5310ead\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500530-m6dbf" Feb 02 11:30:00 crc kubenswrapper[4838]: I0202 11:30:00.377996 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/479bad84-d9ca-4089-8bf3-2f1eb5310ead-secret-volume\") pod \"collect-profiles-29500530-m6dbf\" (UID: \"479bad84-d9ca-4089-8bf3-2f1eb5310ead\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500530-m6dbf" Feb 02 11:30:00 crc kubenswrapper[4838]: I0202 11:30:00.378186 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltmtz\" (UniqueName: \"kubernetes.io/projected/479bad84-d9ca-4089-8bf3-2f1eb5310ead-kube-api-access-ltmtz\") pod \"collect-profiles-29500530-m6dbf\" (UID: \"479bad84-d9ca-4089-8bf3-2f1eb5310ead\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500530-m6dbf" Feb 02 11:30:00 crc kubenswrapper[4838]: I0202 11:30:00.477476 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500530-m6dbf" Feb 02 11:30:00 crc kubenswrapper[4838]: I0202 11:30:00.937577 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500530-m6dbf"] Feb 02 11:30:00 crc kubenswrapper[4838]: W0202 11:30:00.938967 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod479bad84_d9ca_4089_8bf3_2f1eb5310ead.slice/crio-a2f7d8aecc3b26235e3e7f5826a62d7f898fcb7dce603ea506ea2e25f88bf7bf WatchSource:0}: Error finding container a2f7d8aecc3b26235e3e7f5826a62d7f898fcb7dce603ea506ea2e25f88bf7bf: Status 404 returned error can't find the container with id a2f7d8aecc3b26235e3e7f5826a62d7f898fcb7dce603ea506ea2e25f88bf7bf Feb 02 11:30:01 crc kubenswrapper[4838]: I0202 11:30:01.774879 4838 generic.go:334] "Generic (PLEG): container finished" podID="479bad84-d9ca-4089-8bf3-2f1eb5310ead" containerID="5971ccca78200e98890edfdefd9295acb1dd53d365cc509709dea1a663712427" exitCode=0 Feb 02 11:30:01 crc kubenswrapper[4838]: I0202 11:30:01.775143 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500530-m6dbf" event={"ID":"479bad84-d9ca-4089-8bf3-2f1eb5310ead","Type":"ContainerDied","Data":"5971ccca78200e98890edfdefd9295acb1dd53d365cc509709dea1a663712427"} Feb 02 11:30:01 crc kubenswrapper[4838]: I0202 11:30:01.775169 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500530-m6dbf" event={"ID":"479bad84-d9ca-4089-8bf3-2f1eb5310ead","Type":"ContainerStarted","Data":"a2f7d8aecc3b26235e3e7f5826a62d7f898fcb7dce603ea506ea2e25f88bf7bf"} Feb 02 11:30:03 crc kubenswrapper[4838]: I0202 11:30:03.143200 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500530-m6dbf" Feb 02 11:30:03 crc kubenswrapper[4838]: I0202 11:30:03.313434 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ltmtz\" (UniqueName: \"kubernetes.io/projected/479bad84-d9ca-4089-8bf3-2f1eb5310ead-kube-api-access-ltmtz\") pod \"479bad84-d9ca-4089-8bf3-2f1eb5310ead\" (UID: \"479bad84-d9ca-4089-8bf3-2f1eb5310ead\") " Feb 02 11:30:03 crc kubenswrapper[4838]: I0202 11:30:03.313574 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/479bad84-d9ca-4089-8bf3-2f1eb5310ead-config-volume\") pod \"479bad84-d9ca-4089-8bf3-2f1eb5310ead\" (UID: \"479bad84-d9ca-4089-8bf3-2f1eb5310ead\") " Feb 02 11:30:03 crc kubenswrapper[4838]: I0202 11:30:03.314310 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/479bad84-d9ca-4089-8bf3-2f1eb5310ead-config-volume" (OuterVolumeSpecName: "config-volume") pod "479bad84-d9ca-4089-8bf3-2f1eb5310ead" (UID: "479bad84-d9ca-4089-8bf3-2f1eb5310ead"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:30:03 crc kubenswrapper[4838]: I0202 11:30:03.314359 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/479bad84-d9ca-4089-8bf3-2f1eb5310ead-secret-volume\") pod \"479bad84-d9ca-4089-8bf3-2f1eb5310ead\" (UID: \"479bad84-d9ca-4089-8bf3-2f1eb5310ead\") " Feb 02 11:30:03 crc kubenswrapper[4838]: I0202 11:30:03.315190 4838 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/479bad84-d9ca-4089-8bf3-2f1eb5310ead-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 11:30:03 crc kubenswrapper[4838]: I0202 11:30:03.320799 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/479bad84-d9ca-4089-8bf3-2f1eb5310ead-kube-api-access-ltmtz" (OuterVolumeSpecName: "kube-api-access-ltmtz") pod "479bad84-d9ca-4089-8bf3-2f1eb5310ead" (UID: "479bad84-d9ca-4089-8bf3-2f1eb5310ead"). InnerVolumeSpecName "kube-api-access-ltmtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:30:03 crc kubenswrapper[4838]: I0202 11:30:03.331749 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/479bad84-d9ca-4089-8bf3-2f1eb5310ead-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "479bad84-d9ca-4089-8bf3-2f1eb5310ead" (UID: "479bad84-d9ca-4089-8bf3-2f1eb5310ead"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:30:03 crc kubenswrapper[4838]: I0202 11:30:03.416747 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ltmtz\" (UniqueName: \"kubernetes.io/projected/479bad84-d9ca-4089-8bf3-2f1eb5310ead-kube-api-access-ltmtz\") on node \"crc\" DevicePath \"\"" Feb 02 11:30:03 crc kubenswrapper[4838]: I0202 11:30:03.416795 4838 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/479bad84-d9ca-4089-8bf3-2f1eb5310ead-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 02 11:30:03 crc kubenswrapper[4838]: I0202 11:30:03.791193 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500530-m6dbf" event={"ID":"479bad84-d9ca-4089-8bf3-2f1eb5310ead","Type":"ContainerDied","Data":"a2f7d8aecc3b26235e3e7f5826a62d7f898fcb7dce603ea506ea2e25f88bf7bf"} Feb 02 11:30:03 crc kubenswrapper[4838]: I0202 11:30:03.791485 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a2f7d8aecc3b26235e3e7f5826a62d7f898fcb7dce603ea506ea2e25f88bf7bf" Feb 02 11:30:03 crc kubenswrapper[4838]: I0202 11:30:03.791246 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500530-m6dbf" Feb 02 11:30:04 crc kubenswrapper[4838]: I0202 11:30:04.217685 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500485-q5vqj"] Feb 02 11:30:04 crc kubenswrapper[4838]: I0202 11:30:04.224174 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500485-q5vqj"] Feb 02 11:30:04 crc kubenswrapper[4838]: I0202 11:30:04.519242 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fa1c8c4-4ea6-484c-906a-6e7c8016757b" path="/var/lib/kubelet/pods/4fa1c8c4-4ea6-484c-906a-6e7c8016757b/volumes" Feb 02 11:30:05 crc kubenswrapper[4838]: I0202 11:30:05.683572 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vww22"] Feb 02 11:30:05 crc kubenswrapper[4838]: E0202 11:30:05.684224 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="479bad84-d9ca-4089-8bf3-2f1eb5310ead" containerName="collect-profiles" Feb 02 11:30:05 crc kubenswrapper[4838]: I0202 11:30:05.684236 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="479bad84-d9ca-4089-8bf3-2f1eb5310ead" containerName="collect-profiles" Feb 02 11:30:05 crc kubenswrapper[4838]: I0202 11:30:05.684434 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="479bad84-d9ca-4089-8bf3-2f1eb5310ead" containerName="collect-profiles" Feb 02 11:30:05 crc kubenswrapper[4838]: I0202 11:30:05.685881 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vww22" Feb 02 11:30:05 crc kubenswrapper[4838]: I0202 11:30:05.702246 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vww22"] Feb 02 11:30:05 crc kubenswrapper[4838]: I0202 11:30:05.860311 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d35866d-90e1-4309-ab96-a1135cd809b6-catalog-content\") pod \"certified-operators-vww22\" (UID: \"2d35866d-90e1-4309-ab96-a1135cd809b6\") " pod="openshift-marketplace/certified-operators-vww22" Feb 02 11:30:05 crc kubenswrapper[4838]: I0202 11:30:05.860508 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d35866d-90e1-4309-ab96-a1135cd809b6-utilities\") pod \"certified-operators-vww22\" (UID: \"2d35866d-90e1-4309-ab96-a1135cd809b6\") " pod="openshift-marketplace/certified-operators-vww22" Feb 02 11:30:05 crc kubenswrapper[4838]: I0202 11:30:05.860685 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vfs7\" (UniqueName: \"kubernetes.io/projected/2d35866d-90e1-4309-ab96-a1135cd809b6-kube-api-access-6vfs7\") pod \"certified-operators-vww22\" (UID: \"2d35866d-90e1-4309-ab96-a1135cd809b6\") " pod="openshift-marketplace/certified-operators-vww22" Feb 02 11:30:05 crc kubenswrapper[4838]: I0202 11:30:05.962462 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d35866d-90e1-4309-ab96-a1135cd809b6-catalog-content\") pod \"certified-operators-vww22\" (UID: \"2d35866d-90e1-4309-ab96-a1135cd809b6\") " pod="openshift-marketplace/certified-operators-vww22" Feb 02 11:30:05 crc kubenswrapper[4838]: I0202 11:30:05.962660 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d35866d-90e1-4309-ab96-a1135cd809b6-utilities\") pod \"certified-operators-vww22\" (UID: \"2d35866d-90e1-4309-ab96-a1135cd809b6\") " pod="openshift-marketplace/certified-operators-vww22" Feb 02 11:30:05 crc kubenswrapper[4838]: I0202 11:30:05.962766 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vfs7\" (UniqueName: \"kubernetes.io/projected/2d35866d-90e1-4309-ab96-a1135cd809b6-kube-api-access-6vfs7\") pod \"certified-operators-vww22\" (UID: \"2d35866d-90e1-4309-ab96-a1135cd809b6\") " pod="openshift-marketplace/certified-operators-vww22" Feb 02 11:30:05 crc kubenswrapper[4838]: I0202 11:30:05.963080 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d35866d-90e1-4309-ab96-a1135cd809b6-catalog-content\") pod \"certified-operators-vww22\" (UID: \"2d35866d-90e1-4309-ab96-a1135cd809b6\") " pod="openshift-marketplace/certified-operators-vww22" Feb 02 11:30:05 crc kubenswrapper[4838]: I0202 11:30:05.963197 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d35866d-90e1-4309-ab96-a1135cd809b6-utilities\") pod \"certified-operators-vww22\" (UID: \"2d35866d-90e1-4309-ab96-a1135cd809b6\") " pod="openshift-marketplace/certified-operators-vww22" Feb 02 11:30:05 crc kubenswrapper[4838]: I0202 11:30:05.984147 4838 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-6vfs7\" (UniqueName: \"kubernetes.io/projected/2d35866d-90e1-4309-ab96-a1135cd809b6-kube-api-access-6vfs7\") pod \"certified-operators-vww22\" (UID: \"2d35866d-90e1-4309-ab96-a1135cd809b6\") " pod="openshift-marketplace/certified-operators-vww22" Feb 02 11:30:06 crc kubenswrapper[4838]: I0202 11:30:06.014662 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vww22" Feb 02 11:30:06 crc kubenswrapper[4838]: I0202 11:30:06.546751 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vww22"] Feb 02 11:30:06 crc kubenswrapper[4838]: I0202 11:30:06.820964 4838 generic.go:334] "Generic (PLEG): container finished" podID="2d35866d-90e1-4309-ab96-a1135cd809b6" containerID="ed49cf7d8a85505822bbbec1bfa1d5cf58b6cd6f205a50b26b8a32aeaab50f54" exitCode=0 Feb 02 11:30:06 crc kubenswrapper[4838]: I0202 11:30:06.821012 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vww22" event={"ID":"2d35866d-90e1-4309-ab96-a1135cd809b6","Type":"ContainerDied","Data":"ed49cf7d8a85505822bbbec1bfa1d5cf58b6cd6f205a50b26b8a32aeaab50f54"} Feb 02 11:30:06 crc kubenswrapper[4838]: I0202 11:30:06.821039 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vww22" event={"ID":"2d35866d-90e1-4309-ab96-a1135cd809b6","Type":"ContainerStarted","Data":"78431e647f74b2d10d864d537da55fbeff1e07cd9e8227f5589f2b4932548a1f"} Feb 02 11:30:07 crc kubenswrapper[4838]: I0202 11:30:07.829264 4838 generic.go:334] "Generic (PLEG): container finished" podID="2d35866d-90e1-4309-ab96-a1135cd809b6" containerID="6c667c15ccea8ca00a49855f4bd948775994355eb8c9ee47e8f56c5e34e2a297" exitCode=0 Feb 02 11:30:07 crc kubenswrapper[4838]: I0202 11:30:07.829342 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vww22" event={"ID":"2d35866d-90e1-4309-ab96-a1135cd809b6","Type":"ContainerDied","Data":"6c667c15ccea8ca00a49855f4bd948775994355eb8c9ee47e8f56c5e34e2a297"} Feb 02 11:30:08 crc kubenswrapper[4838]: I0202 11:30:08.845182 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vww22" event={"ID":"2d35866d-90e1-4309-ab96-a1135cd809b6","Type":"ContainerStarted","Data":"24f567454e9e67c514e2f393f9afe8b813bed8451dab690ef82d199ba9504423"} Feb 02 11:30:08 crc kubenswrapper[4838]: I0202 11:30:08.865390 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vww22" podStartSLOduration=2.440118892 podStartE2EDuration="3.865364427s" podCreationTimestamp="2026-02-02 11:30:05 +0000 UTC" firstStartedPulling="2026-02-02 11:30:06.824362804 +0000 UTC m=+2201.161463832" lastFinishedPulling="2026-02-02 11:30:08.249608339 +0000 UTC m=+2202.586709367" observedRunningTime="2026-02-02 11:30:08.865016597 +0000 UTC m=+2203.202117625" watchObservedRunningTime="2026-02-02 11:30:08.865364427 +0000 UTC m=+2203.202465465" Feb 02 11:30:10 crc kubenswrapper[4838]: I0202 11:30:10.483772 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97_a0ffc923-92b7-4528-963f-bb993ecb20c1/util/0.log" Feb 02 11:30:10 crc kubenswrapper[4838]: I0202 11:30:10.683111 4838 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97_a0ffc923-92b7-4528-963f-bb993ecb20c1/util/0.log" Feb 02 11:30:10 crc kubenswrapper[4838]: I0202 11:30:10.755657 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97_a0ffc923-92b7-4528-963f-bb993ecb20c1/pull/0.log" Feb 02 11:30:10 crc kubenswrapper[4838]: I0202 11:30:10.755800 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97_a0ffc923-92b7-4528-963f-bb993ecb20c1/pull/0.log" Feb 02 11:30:10 crc kubenswrapper[4838]: I0202 11:30:10.916175 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97_a0ffc923-92b7-4528-963f-bb993ecb20c1/util/0.log" Feb 02 11:30:10 crc kubenswrapper[4838]: I0202 11:30:10.921811 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97_a0ffc923-92b7-4528-963f-bb993ecb20c1/pull/0.log" Feb 02 11:30:10 crc kubenswrapper[4838]: I0202 11:30:10.999200 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97_a0ffc923-92b7-4528-963f-bb993ecb20c1/extract/0.log" Feb 02 11:30:11 crc kubenswrapper[4838]: I0202 11:30:11.233005 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7b6c4d8c5f-db2x5_cb4f687b-4b19-447b-beb4-1646c2a40800/manager/0.log" Feb 02 11:30:11 crc kubenswrapper[4838]: I0202 11:30:11.238704 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-8d874c8fc-hm4jh_13ab41db-f38e-4980-89f9-361236526dfa/manager/0.log" Feb 02 11:30:11 crc kubenswrapper[4838]: I0202 11:30:11.385938 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-6d9697b7f4-rs64q_e358aab4-cbb0-4522-8740-6646b7fdcabd/manager/0.log" Feb 02 11:30:11 crc kubenswrapper[4838]: I0202 11:30:11.706085 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-8886f4c47-xftmv_084b46d2-88a9-42e4-83b2-dbccf264aafe/manager/0.log" Feb 02 11:30:11 crc kubenswrapper[4838]: I0202 11:30:11.855475 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-69d6db494d-qrf72_9cfe65eb-c657-4f96-b48f-1c9831fd75ba/manager/0.log" Feb 02 11:30:11 crc kubenswrapper[4838]: I0202 11:30:11.937498 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-5fb775575f-2kqdq_ccd2dd8e-7b67-4b94-9b9a-b76fab87903c/manager/0.log" Feb 02 11:30:12 crc kubenswrapper[4838]: I0202 11:30:12.272409 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7c6b8858cc-lk5ts_79dd465e-2e36-423e-af5b-f41d715c0297/manager/0.log" Feb 02 11:30:12 crc kubenswrapper[4838]: I0202 11:30:12.419783 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-84f48565d4-tn9zr_4b7d42b0-25f5-40d4-8deb-34841b6c8c92/manager/0.log" Feb 02 11:30:12 crc kubenswrapper[4838]: I0202 11:30:12.476459 4838 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-79955696d6-thn2f_5e0647d6-93ed-40f1-a522-f5ecf769dd14/manager/0.log" Feb 02 11:30:12 crc kubenswrapper[4838]: I0202 11:30:12.518093 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7dd968899f-5s687_75858b44-9a09-43f3-8de5-8ae999ae2657/manager/0.log" Feb 02 11:30:12 crc kubenswrapper[4838]: I0202 11:30:12.677820 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-67bf948998-q6xd2_8291636d-bcda-4171-825a-f0f3c73b1320/manager/0.log" Feb 02 11:30:12 crc kubenswrapper[4838]: I0202 11:30:12.853375 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-585dbc889-52b6h_53860709-50fb-44d9-910b-d4142608d8d8/manager/0.log" Feb 02 11:30:12 crc kubenswrapper[4838]: I0202 11:30:12.968413 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-55bff696bd-pms7g_a17b67e7-df64-4f12-8e78-c52068d2b1df/manager/0.log" Feb 02 11:30:13 crc kubenswrapper[4838]: I0202 11:30:13.079537 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6687f8d877-hp6js_382292e1-fda7-4ab5-91e7-cf4ade4d6363/manager/0.log" Feb 02 11:30:13 crc kubenswrapper[4838]: I0202 11:30:13.177801 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj_0a8916a2-6c71-4678-9a42-23b82b72f891/manager/0.log" Feb 02 11:30:13 crc kubenswrapper[4838]: I0202 11:30:13.440818 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-init-6d5fdc6cdc-dvqjc_228bb6dc-ac19-4dd3-aaa7-265cc00de1c9/operator/0.log" Feb 02 11:30:13 crc kubenswrapper[4838]: I0202 11:30:13.550760 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-svvg4_11d5435a-6771-4a54-b3f1-1f4f6bd2c123/registry-server/0.log" Feb 02 11:30:13 crc kubenswrapper[4838]: I0202 11:30:13.740448 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-788c46999f-lbjxl_e40ca74c-361a-4102-b7de-35464bb8821b/manager/0.log" Feb 02 11:30:13 crc kubenswrapper[4838]: I0202 11:30:13.917863 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5b964cf4cd-kbj4l_7ebd9e27-5249-4c31-86cd-200ec9c3b852/manager/0.log" Feb 02 11:30:14 crc kubenswrapper[4838]: I0202 11:30:14.114254 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-pljzb_a158eb1e-69b8-48ad-8061-a3e503981572/operator/0.log" Feb 02 11:30:14 crc kubenswrapper[4838]: I0202 11:30:14.303527 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-68fc8c869-8l8mb_d960011d-30b7-4eb4-9e06-1b8b9aa0a114/manager/0.log" Feb 02 11:30:14 crc kubenswrapper[4838]: I0202 11:30:14.427413 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-5dcd749f76-wzqhl_83e1d0f5-af2b-4c12-abbd-712e18108a24/manager/0.log" Feb 02 11:30:14 crc kubenswrapper[4838]: I0202 11:30:14.486061 4838 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-64b5b76f97-4d7p8_5e7863af-65e8-4d89-a434-fac6c13414cc/manager/0.log" Feb 02 11:30:14 crc kubenswrapper[4838]: I0202 11:30:14.616085 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-56f8bfcd9f-5xdv4_27825541-2816-4017-bba1-0f6f5946bb3c/manager/0.log" Feb 02 11:30:14 crc kubenswrapper[4838]: I0202 11:30:14.700795 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-564965969-h6lq5_149430e7-7b6d-44d0-a474-944271e7bb5e/manager/0.log" Feb 02 11:30:16 crc kubenswrapper[4838]: I0202 11:30:16.819449 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vww22" Feb 02 11:30:16 crc kubenswrapper[4838]: I0202 11:30:16.819484 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-9ln9l"] Feb 02 11:30:16 crc kubenswrapper[4838]: I0202 11:30:16.819499 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-9ln9l"] Feb 02 11:30:16 crc kubenswrapper[4838]: I0202 11:30:16.819515 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vww22" Feb 02 11:30:16 crc kubenswrapper[4838]: I0202 11:30:16.835038 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vww22" Feb 02 11:30:17 crc kubenswrapper[4838]: I0202 11:30:17.863278 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vww22" Feb 02 11:30:17 crc kubenswrapper[4838]: I0202 11:30:17.923348 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vww22"] Feb 02 11:30:18 crc kubenswrapper[4838]: I0202 11:30:18.521770 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6ecc1cc-2175-4372-ae32-61761f66a342" path="/var/lib/kubelet/pods/b6ecc1cc-2175-4372-ae32-61761f66a342/volumes" Feb 02 11:30:19 crc kubenswrapper[4838]: I0202 11:30:19.519322 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wc9p6"] Feb 02 11:30:19 crc kubenswrapper[4838]: I0202 11:30:19.521152 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wc9p6" Feb 02 11:30:19 crc kubenswrapper[4838]: I0202 11:30:19.530533 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wc9p6"] Feb 02 11:30:19 crc kubenswrapper[4838]: I0202 11:30:19.565389 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5711aae2-4e88-4dcc-8736-0fb7ca22aef8-utilities\") pod \"community-operators-wc9p6\" (UID: \"5711aae2-4e88-4dcc-8736-0fb7ca22aef8\") " pod="openshift-marketplace/community-operators-wc9p6" Feb 02 11:30:19 crc kubenswrapper[4838]: I0202 11:30:19.565487 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtq2l\" (UniqueName: \"kubernetes.io/projected/5711aae2-4e88-4dcc-8736-0fb7ca22aef8-kube-api-access-xtq2l\") pod \"community-operators-wc9p6\" (UID: \"5711aae2-4e88-4dcc-8736-0fb7ca22aef8\") " pod="openshift-marketplace/community-operators-wc9p6" Feb 02 11:30:19 crc kubenswrapper[4838]: I0202 11:30:19.565590 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5711aae2-4e88-4dcc-8736-0fb7ca22aef8-catalog-content\") pod \"community-operators-wc9p6\" (UID: \"5711aae2-4e88-4dcc-8736-0fb7ca22aef8\") " pod="openshift-marketplace/community-operators-wc9p6" Feb 02 11:30:19 crc kubenswrapper[4838]: I0202 11:30:19.667531 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5711aae2-4e88-4dcc-8736-0fb7ca22aef8-utilities\") pod \"community-operators-wc9p6\" (UID: \"5711aae2-4e88-4dcc-8736-0fb7ca22aef8\") " pod="openshift-marketplace/community-operators-wc9p6" Feb 02 11:30:19 crc kubenswrapper[4838]: I0202 11:30:19.667919 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtq2l\" (UniqueName: \"kubernetes.io/projected/5711aae2-4e88-4dcc-8736-0fb7ca22aef8-kube-api-access-xtq2l\") pod \"community-operators-wc9p6\" (UID: \"5711aae2-4e88-4dcc-8736-0fb7ca22aef8\") " pod="openshift-marketplace/community-operators-wc9p6" Feb 02 11:30:19 crc kubenswrapper[4838]: I0202 11:30:19.668031 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5711aae2-4e88-4dcc-8736-0fb7ca22aef8-catalog-content\") pod \"community-operators-wc9p6\" (UID: \"5711aae2-4e88-4dcc-8736-0fb7ca22aef8\") " pod="openshift-marketplace/community-operators-wc9p6" Feb 02 11:30:19 crc kubenswrapper[4838]: I0202 11:30:19.668117 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5711aae2-4e88-4dcc-8736-0fb7ca22aef8-utilities\") pod \"community-operators-wc9p6\" (UID: \"5711aae2-4e88-4dcc-8736-0fb7ca22aef8\") " pod="openshift-marketplace/community-operators-wc9p6" Feb 02 11:30:19 crc kubenswrapper[4838]: I0202 11:30:19.668395 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5711aae2-4e88-4dcc-8736-0fb7ca22aef8-catalog-content\") pod \"community-operators-wc9p6\" (UID: \"5711aae2-4e88-4dcc-8736-0fb7ca22aef8\") " pod="openshift-marketplace/community-operators-wc9p6" Feb 02 11:30:19 crc kubenswrapper[4838]: I0202 11:30:19.686125 4838 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-xtq2l\" (UniqueName: \"kubernetes.io/projected/5711aae2-4e88-4dcc-8736-0fb7ca22aef8-kube-api-access-xtq2l\") pod \"community-operators-wc9p6\" (UID: \"5711aae2-4e88-4dcc-8736-0fb7ca22aef8\") " pod="openshift-marketplace/community-operators-wc9p6" Feb 02 11:30:19 crc kubenswrapper[4838]: I0202 11:30:19.830319 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vww22" podUID="2d35866d-90e1-4309-ab96-a1135cd809b6" containerName="registry-server" containerID="cri-o://24f567454e9e67c514e2f393f9afe8b813bed8451dab690ef82d199ba9504423" gracePeriod=2 Feb 02 11:30:19 crc kubenswrapper[4838]: I0202 11:30:19.841846 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wc9p6" Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.393958 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vww22" Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.405319 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wc9p6"] Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.488908 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d35866d-90e1-4309-ab96-a1135cd809b6-utilities\") pod \"2d35866d-90e1-4309-ab96-a1135cd809b6\" (UID: \"2d35866d-90e1-4309-ab96-a1135cd809b6\") " Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.489299 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d35866d-90e1-4309-ab96-a1135cd809b6-catalog-content\") pod \"2d35866d-90e1-4309-ab96-a1135cd809b6\" (UID: \"2d35866d-90e1-4309-ab96-a1135cd809b6\") " Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.489603 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d35866d-90e1-4309-ab96-a1135cd809b6-utilities" (OuterVolumeSpecName: "utilities") pod "2d35866d-90e1-4309-ab96-a1135cd809b6" (UID: "2d35866d-90e1-4309-ab96-a1135cd809b6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.495975 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6vfs7\" (UniqueName: \"kubernetes.io/projected/2d35866d-90e1-4309-ab96-a1135cd809b6-kube-api-access-6vfs7\") pod \"2d35866d-90e1-4309-ab96-a1135cd809b6\" (UID: \"2d35866d-90e1-4309-ab96-a1135cd809b6\") " Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.496647 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d35866d-90e1-4309-ab96-a1135cd809b6-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.502562 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d35866d-90e1-4309-ab96-a1135cd809b6-kube-api-access-6vfs7" (OuterVolumeSpecName: "kube-api-access-6vfs7") pod "2d35866d-90e1-4309-ab96-a1135cd809b6" (UID: "2d35866d-90e1-4309-ab96-a1135cd809b6"). InnerVolumeSpecName "kube-api-access-6vfs7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.537680 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d35866d-90e1-4309-ab96-a1135cd809b6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2d35866d-90e1-4309-ab96-a1135cd809b6" (UID: "2d35866d-90e1-4309-ab96-a1135cd809b6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.598424 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d35866d-90e1-4309-ab96-a1135cd809b6-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.598454 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6vfs7\" (UniqueName: \"kubernetes.io/projected/2d35866d-90e1-4309-ab96-a1135cd809b6-kube-api-access-6vfs7\") on node \"crc\" DevicePath \"\"" Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.839328 4838 generic.go:334] "Generic (PLEG): container finished" podID="2d35866d-90e1-4309-ab96-a1135cd809b6" containerID="24f567454e9e67c514e2f393f9afe8b813bed8451dab690ef82d199ba9504423" exitCode=0 Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.839388 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vww22" event={"ID":"2d35866d-90e1-4309-ab96-a1135cd809b6","Type":"ContainerDied","Data":"24f567454e9e67c514e2f393f9afe8b813bed8451dab690ef82d199ba9504423"} Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.839426 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vww22" event={"ID":"2d35866d-90e1-4309-ab96-a1135cd809b6","Type":"ContainerDied","Data":"78431e647f74b2d10d864d537da55fbeff1e07cd9e8227f5589f2b4932548a1f"} Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.839432 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vww22" Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.839444 4838 scope.go:117] "RemoveContainer" containerID="24f567454e9e67c514e2f393f9afe8b813bed8451dab690ef82d199ba9504423" Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.846156 4838 generic.go:334] "Generic (PLEG): container finished" podID="5711aae2-4e88-4dcc-8736-0fb7ca22aef8" containerID="4f837c208af5f6feb6498c12df29171a39a380e91db9e0146bab7b999c43253a" exitCode=0 Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.846192 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wc9p6" event={"ID":"5711aae2-4e88-4dcc-8736-0fb7ca22aef8","Type":"ContainerDied","Data":"4f837c208af5f6feb6498c12df29171a39a380e91db9e0146bab7b999c43253a"} Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.846213 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wc9p6" event={"ID":"5711aae2-4e88-4dcc-8736-0fb7ca22aef8","Type":"ContainerStarted","Data":"684bcf7023ca6b8f9b319a9574960aad099a467154ebd05536078c7ac83c01c4"} Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.872149 4838 scope.go:117] "RemoveContainer" containerID="6c667c15ccea8ca00a49855f4bd948775994355eb8c9ee47e8f56c5e34e2a297" Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.911683 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vww22"] Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.917357 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vww22"] Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.933484 4838 scope.go:117] "RemoveContainer" containerID="ed49cf7d8a85505822bbbec1bfa1d5cf58b6cd6f205a50b26b8a32aeaab50f54" Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.973516 4838 scope.go:117] "RemoveContainer" containerID="24f567454e9e67c514e2f393f9afe8b813bed8451dab690ef82d199ba9504423" Feb 02 11:30:20 crc kubenswrapper[4838]: E0202 11:30:20.973853 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24f567454e9e67c514e2f393f9afe8b813bed8451dab690ef82d199ba9504423\": container with ID starting with 24f567454e9e67c514e2f393f9afe8b813bed8451dab690ef82d199ba9504423 not found: ID does not exist" containerID="24f567454e9e67c514e2f393f9afe8b813bed8451dab690ef82d199ba9504423" Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.973893 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24f567454e9e67c514e2f393f9afe8b813bed8451dab690ef82d199ba9504423"} err="failed to get container status \"24f567454e9e67c514e2f393f9afe8b813bed8451dab690ef82d199ba9504423\": rpc error: code = NotFound desc = could not find container \"24f567454e9e67c514e2f393f9afe8b813bed8451dab690ef82d199ba9504423\": container with ID starting with 24f567454e9e67c514e2f393f9afe8b813bed8451dab690ef82d199ba9504423 not found: ID does not exist" Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.973933 4838 scope.go:117] "RemoveContainer" containerID="6c667c15ccea8ca00a49855f4bd948775994355eb8c9ee47e8f56c5e34e2a297" Feb 02 11:30:20 crc kubenswrapper[4838]: E0202 11:30:20.974157 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c667c15ccea8ca00a49855f4bd948775994355eb8c9ee47e8f56c5e34e2a297\": container with 
ID starting with 6c667c15ccea8ca00a49855f4bd948775994355eb8c9ee47e8f56c5e34e2a297 not found: ID does not exist" containerID="6c667c15ccea8ca00a49855f4bd948775994355eb8c9ee47e8f56c5e34e2a297" Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.974181 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c667c15ccea8ca00a49855f4bd948775994355eb8c9ee47e8f56c5e34e2a297"} err="failed to get container status \"6c667c15ccea8ca00a49855f4bd948775994355eb8c9ee47e8f56c5e34e2a297\": rpc error: code = NotFound desc = could not find container \"6c667c15ccea8ca00a49855f4bd948775994355eb8c9ee47e8f56c5e34e2a297\": container with ID starting with 6c667c15ccea8ca00a49855f4bd948775994355eb8c9ee47e8f56c5e34e2a297 not found: ID does not exist" Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.974196 4838 scope.go:117] "RemoveContainer" containerID="ed49cf7d8a85505822bbbec1bfa1d5cf58b6cd6f205a50b26b8a32aeaab50f54" Feb 02 11:30:20 crc kubenswrapper[4838]: E0202 11:30:20.974927 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed49cf7d8a85505822bbbec1bfa1d5cf58b6cd6f205a50b26b8a32aeaab50f54\": container with ID starting with ed49cf7d8a85505822bbbec1bfa1d5cf58b6cd6f205a50b26b8a32aeaab50f54 not found: ID does not exist" containerID="ed49cf7d8a85505822bbbec1bfa1d5cf58b6cd6f205a50b26b8a32aeaab50f54" Feb 02 11:30:20 crc kubenswrapper[4838]: I0202 11:30:20.974956 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed49cf7d8a85505822bbbec1bfa1d5cf58b6cd6f205a50b26b8a32aeaab50f54"} err="failed to get container status \"ed49cf7d8a85505822bbbec1bfa1d5cf58b6cd6f205a50b26b8a32aeaab50f54\": rpc error: code = NotFound desc = could not find container \"ed49cf7d8a85505822bbbec1bfa1d5cf58b6cd6f205a50b26b8a32aeaab50f54\": container with ID starting with ed49cf7d8a85505822bbbec1bfa1d5cf58b6cd6f205a50b26b8a32aeaab50f54 not found: ID does not exist" Feb 02 11:30:21 crc kubenswrapper[4838]: I0202 11:30:21.857093 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wc9p6" event={"ID":"5711aae2-4e88-4dcc-8736-0fb7ca22aef8","Type":"ContainerStarted","Data":"cf5ca8cf3656d04775bbb8c6120f27d514e3f7ab468858bf5040ebdbce772214"} Feb 02 11:30:22 crc kubenswrapper[4838]: I0202 11:30:22.516262 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d35866d-90e1-4309-ab96-a1135cd809b6" path="/var/lib/kubelet/pods/2d35866d-90e1-4309-ab96-a1135cd809b6/volumes" Feb 02 11:30:22 crc kubenswrapper[4838]: I0202 11:30:22.866326 4838 generic.go:334] "Generic (PLEG): container finished" podID="5711aae2-4e88-4dcc-8736-0fb7ca22aef8" containerID="cf5ca8cf3656d04775bbb8c6120f27d514e3f7ab468858bf5040ebdbce772214" exitCode=0 Feb 02 11:30:22 crc kubenswrapper[4838]: I0202 11:30:22.866390 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wc9p6" event={"ID":"5711aae2-4e88-4dcc-8736-0fb7ca22aef8","Type":"ContainerDied","Data":"cf5ca8cf3656d04775bbb8c6120f27d514e3f7ab468858bf5040ebdbce772214"} Feb 02 11:30:23 crc kubenswrapper[4838]: I0202 11:30:23.895097 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wc9p6" event={"ID":"5711aae2-4e88-4dcc-8736-0fb7ca22aef8","Type":"ContainerStarted","Data":"b243463f5a854ce17f3fb46fbdb1f459adf5600c52da55f8d8b543b6c8df1ad8"} Feb 02 11:30:23 crc kubenswrapper[4838]: I0202 
11:30:23.934449 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wc9p6" podStartSLOduration=2.487700565 podStartE2EDuration="4.934428109s" podCreationTimestamp="2026-02-02 11:30:19 +0000 UTC" firstStartedPulling="2026-02-02 11:30:20.871577428 +0000 UTC m=+2215.208678456" lastFinishedPulling="2026-02-02 11:30:23.318304972 +0000 UTC m=+2217.655406000" observedRunningTime="2026-02-02 11:30:23.914173184 +0000 UTC m=+2218.251274212" watchObservedRunningTime="2026-02-02 11:30:23.934428109 +0000 UTC m=+2218.271529147" Feb 02 11:30:29 crc kubenswrapper[4838]: I0202 11:30:29.842165 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wc9p6" Feb 02 11:30:29 crc kubenswrapper[4838]: I0202 11:30:29.842722 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wc9p6" Feb 02 11:30:29 crc kubenswrapper[4838]: I0202 11:30:29.893678 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wc9p6" Feb 02 11:30:30 crc kubenswrapper[4838]: I0202 11:30:29.994699 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wc9p6" Feb 02 11:30:30 crc kubenswrapper[4838]: I0202 11:30:30.136996 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wc9p6"] Feb 02 11:30:31 crc kubenswrapper[4838]: I0202 11:30:31.956145 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wc9p6" podUID="5711aae2-4e88-4dcc-8736-0fb7ca22aef8" containerName="registry-server" containerID="cri-o://b243463f5a854ce17f3fb46fbdb1f459adf5600c52da55f8d8b543b6c8df1ad8" gracePeriod=2 Feb 02 11:30:32 crc kubenswrapper[4838]: I0202 11:30:32.460851 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wc9p6" Feb 02 11:30:32 crc kubenswrapper[4838]: I0202 11:30:32.633040 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5711aae2-4e88-4dcc-8736-0fb7ca22aef8-utilities\") pod \"5711aae2-4e88-4dcc-8736-0fb7ca22aef8\" (UID: \"5711aae2-4e88-4dcc-8736-0fb7ca22aef8\") " Feb 02 11:30:32 crc kubenswrapper[4838]: I0202 11:30:32.633284 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5711aae2-4e88-4dcc-8736-0fb7ca22aef8-catalog-content\") pod \"5711aae2-4e88-4dcc-8736-0fb7ca22aef8\" (UID: \"5711aae2-4e88-4dcc-8736-0fb7ca22aef8\") " Feb 02 11:30:32 crc kubenswrapper[4838]: I0202 11:30:32.633347 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xtq2l\" (UniqueName: \"kubernetes.io/projected/5711aae2-4e88-4dcc-8736-0fb7ca22aef8-kube-api-access-xtq2l\") pod \"5711aae2-4e88-4dcc-8736-0fb7ca22aef8\" (UID: \"5711aae2-4e88-4dcc-8736-0fb7ca22aef8\") " Feb 02 11:30:32 crc kubenswrapper[4838]: I0202 11:30:32.634113 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5711aae2-4e88-4dcc-8736-0fb7ca22aef8-utilities" (OuterVolumeSpecName: "utilities") pod "5711aae2-4e88-4dcc-8736-0fb7ca22aef8" (UID: "5711aae2-4e88-4dcc-8736-0fb7ca22aef8"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:30:32 crc kubenswrapper[4838]: I0202 11:30:32.635295 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5711aae2-4e88-4dcc-8736-0fb7ca22aef8-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 11:30:32 crc kubenswrapper[4838]: I0202 11:30:32.645830 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5711aae2-4e88-4dcc-8736-0fb7ca22aef8-kube-api-access-xtq2l" (OuterVolumeSpecName: "kube-api-access-xtq2l") pod "5711aae2-4e88-4dcc-8736-0fb7ca22aef8" (UID: "5711aae2-4e88-4dcc-8736-0fb7ca22aef8"). InnerVolumeSpecName "kube-api-access-xtq2l". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:30:32 crc kubenswrapper[4838]: I0202 11:30:32.737258 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xtq2l\" (UniqueName: \"kubernetes.io/projected/5711aae2-4e88-4dcc-8736-0fb7ca22aef8-kube-api-access-xtq2l\") on node \"crc\" DevicePath \"\"" Feb 02 11:30:32 crc kubenswrapper[4838]: I0202 11:30:32.763686 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5711aae2-4e88-4dcc-8736-0fb7ca22aef8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5711aae2-4e88-4dcc-8736-0fb7ca22aef8" (UID: "5711aae2-4e88-4dcc-8736-0fb7ca22aef8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:30:32 crc kubenswrapper[4838]: I0202 11:30:32.841065 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5711aae2-4e88-4dcc-8736-0fb7ca22aef8-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 11:30:32 crc kubenswrapper[4838]: I0202 11:30:32.967356 4838 generic.go:334] "Generic (PLEG): container finished" podID="5711aae2-4e88-4dcc-8736-0fb7ca22aef8" containerID="b243463f5a854ce17f3fb46fbdb1f459adf5600c52da55f8d8b543b6c8df1ad8" exitCode=0 Feb 02 11:30:32 crc kubenswrapper[4838]: I0202 11:30:32.967409 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wc9p6" Feb 02 11:30:32 crc kubenswrapper[4838]: I0202 11:30:32.967421 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wc9p6" event={"ID":"5711aae2-4e88-4dcc-8736-0fb7ca22aef8","Type":"ContainerDied","Data":"b243463f5a854ce17f3fb46fbdb1f459adf5600c52da55f8d8b543b6c8df1ad8"} Feb 02 11:30:32 crc kubenswrapper[4838]: I0202 11:30:32.967470 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wc9p6" event={"ID":"5711aae2-4e88-4dcc-8736-0fb7ca22aef8","Type":"ContainerDied","Data":"684bcf7023ca6b8f9b319a9574960aad099a467154ebd05536078c7ac83c01c4"} Feb 02 11:30:32 crc kubenswrapper[4838]: I0202 11:30:32.967498 4838 scope.go:117] "RemoveContainer" containerID="b243463f5a854ce17f3fb46fbdb1f459adf5600c52da55f8d8b543b6c8df1ad8" Feb 02 11:30:32 crc kubenswrapper[4838]: I0202 11:30:32.993274 4838 scope.go:117] "RemoveContainer" containerID="cf5ca8cf3656d04775bbb8c6120f27d514e3f7ab468858bf5040ebdbce772214" Feb 02 11:30:33 crc kubenswrapper[4838]: I0202 11:30:33.016212 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wc9p6"] Feb 02 11:30:33 crc kubenswrapper[4838]: I0202 11:30:33.023989 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wc9p6"] Feb 02 11:30:33 crc kubenswrapper[4838]: I0202 11:30:33.032238 4838 scope.go:117] "RemoveContainer" containerID="4f837c208af5f6feb6498c12df29171a39a380e91db9e0146bab7b999c43253a" Feb 02 11:30:33 crc kubenswrapper[4838]: I0202 11:30:33.071039 4838 scope.go:117] "RemoveContainer" containerID="b243463f5a854ce17f3fb46fbdb1f459adf5600c52da55f8d8b543b6c8df1ad8" Feb 02 11:30:33 crc kubenswrapper[4838]: E0202 11:30:33.071608 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b243463f5a854ce17f3fb46fbdb1f459adf5600c52da55f8d8b543b6c8df1ad8\": container with ID starting with b243463f5a854ce17f3fb46fbdb1f459adf5600c52da55f8d8b543b6c8df1ad8 not found: ID does not exist" containerID="b243463f5a854ce17f3fb46fbdb1f459adf5600c52da55f8d8b543b6c8df1ad8" Feb 02 11:30:33 crc kubenswrapper[4838]: I0202 11:30:33.071681 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b243463f5a854ce17f3fb46fbdb1f459adf5600c52da55f8d8b543b6c8df1ad8"} err="failed to get container status \"b243463f5a854ce17f3fb46fbdb1f459adf5600c52da55f8d8b543b6c8df1ad8\": rpc error: code = NotFound desc = could not find container \"b243463f5a854ce17f3fb46fbdb1f459adf5600c52da55f8d8b543b6c8df1ad8\": container with ID starting with b243463f5a854ce17f3fb46fbdb1f459adf5600c52da55f8d8b543b6c8df1ad8 not found: ID does not exist" Feb 02 11:30:33 crc kubenswrapper[4838]: I0202 11:30:33.071729 4838 scope.go:117] "RemoveContainer" containerID="cf5ca8cf3656d04775bbb8c6120f27d514e3f7ab468858bf5040ebdbce772214" Feb 02 11:30:33 crc kubenswrapper[4838]: E0202 11:30:33.072262 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf5ca8cf3656d04775bbb8c6120f27d514e3f7ab468858bf5040ebdbce772214\": container with ID starting with cf5ca8cf3656d04775bbb8c6120f27d514e3f7ab468858bf5040ebdbce772214 not found: ID does not exist" containerID="cf5ca8cf3656d04775bbb8c6120f27d514e3f7ab468858bf5040ebdbce772214" Feb 02 11:30:33 crc kubenswrapper[4838]: I0202 11:30:33.072307 4838 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf5ca8cf3656d04775bbb8c6120f27d514e3f7ab468858bf5040ebdbce772214"} err="failed to get container status \"cf5ca8cf3656d04775bbb8c6120f27d514e3f7ab468858bf5040ebdbce772214\": rpc error: code = NotFound desc = could not find container \"cf5ca8cf3656d04775bbb8c6120f27d514e3f7ab468858bf5040ebdbce772214\": container with ID starting with cf5ca8cf3656d04775bbb8c6120f27d514e3f7ab468858bf5040ebdbce772214 not found: ID does not exist" Feb 02 11:30:33 crc kubenswrapper[4838]: I0202 11:30:33.072331 4838 scope.go:117] "RemoveContainer" containerID="4f837c208af5f6feb6498c12df29171a39a380e91db9e0146bab7b999c43253a" Feb 02 11:30:33 crc kubenswrapper[4838]: E0202 11:30:33.072670 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f837c208af5f6feb6498c12df29171a39a380e91db9e0146bab7b999c43253a\": container with ID starting with 4f837c208af5f6feb6498c12df29171a39a380e91db9e0146bab7b999c43253a not found: ID does not exist" containerID="4f837c208af5f6feb6498c12df29171a39a380e91db9e0146bab7b999c43253a" Feb 02 11:30:33 crc kubenswrapper[4838]: I0202 11:30:33.072731 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f837c208af5f6feb6498c12df29171a39a380e91db9e0146bab7b999c43253a"} err="failed to get container status \"4f837c208af5f6feb6498c12df29171a39a380e91db9e0146bab7b999c43253a\": rpc error: code = NotFound desc = could not find container \"4f837c208af5f6feb6498c12df29171a39a380e91db9e0146bab7b999c43253a\": container with ID starting with 4f837c208af5f6feb6498c12df29171a39a380e91db9e0146bab7b999c43253a not found: ID does not exist" Feb 02 11:30:34 crc kubenswrapper[4838]: I0202 11:30:34.515547 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5711aae2-4e88-4dcc-8736-0fb7ca22aef8" path="/var/lib/kubelet/pods/5711aae2-4e88-4dcc-8736-0fb7ca22aef8/volumes" Feb 02 11:30:36 crc kubenswrapper[4838]: I0202 11:30:36.088217 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-5nmhg_eb0d3aa3-09b5-4b68-833d-03218e1794f0/control-plane-machine-set-operator/0.log" Feb 02 11:30:36 crc kubenswrapper[4838]: I0202 11:30:36.280867 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-v475b_b49e2d2f-5155-49bf-82f3-b68992ebe787/kube-rbac-proxy/0.log" Feb 02 11:30:36 crc kubenswrapper[4838]: I0202 11:30:36.306807 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-v475b_b49e2d2f-5155-49bf-82f3-b68992ebe787/machine-api-operator/0.log" Feb 02 11:30:49 crc kubenswrapper[4838]: I0202 11:30:49.278026 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-858654f9db-cl9xl_b7b5f720-0add-47c9-890a-4ca936379c93/cert-manager-controller/0.log" Feb 02 11:30:49 crc kubenswrapper[4838]: I0202 11:30:49.410852 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-cf98fcc89-t4jjz_5206c985-0926-498a-b2ae-3be3a5034206/cert-manager-cainjector/0.log" Feb 02 11:30:49 crc kubenswrapper[4838]: I0202 11:30:49.563708 4838 scope.go:117] "RemoveContainer" containerID="0c7187a3636eb3c3e489663f9051cf28d2d7e42f92ee96011b4b3331c631161a" Feb 02 11:30:49 crc kubenswrapper[4838]: I0202 11:30:49.587072 4838 scope.go:117] 
"RemoveContainer" containerID="4823bcd344d625ad561c7b904f665bccc6207c62fe0a225cf8d050bc4a5c40fd" Feb 02 11:30:49 crc kubenswrapper[4838]: I0202 11:30:49.608957 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-687f57d79b-tvmg6_59de6fa5-ca64-482d-81af-d1bfd5e7cba4/cert-manager-webhook/0.log" Feb 02 11:30:49 crc kubenswrapper[4838]: I0202 11:30:49.671572 4838 scope.go:117] "RemoveContainer" containerID="8426a6e4d214510a52f278be0c850961a4eb3e86434e21bbda26e1cacba5f3a2" Feb 02 11:31:02 crc kubenswrapper[4838]: I0202 11:31:02.459446 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-q98cv_4cbd203f-2073-4bee-8234-da99cf46562b/nmstate-console-plugin/0.log" Feb 02 11:31:02 crc kubenswrapper[4838]: I0202 11:31:02.668140 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-q9xb2_c3132ec2-5218-4b6f-8e19-dfce93103b19/kube-rbac-proxy/0.log" Feb 02 11:31:02 crc kubenswrapper[4838]: I0202 11:31:02.693692 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-ch2vl_539fff34-8b11-42d9-b32f-4c1cab281cf5/nmstate-handler/0.log" Feb 02 11:31:02 crc kubenswrapper[4838]: I0202 11:31:02.788226 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-q9xb2_c3132ec2-5218-4b6f-8e19-dfce93103b19/nmstate-metrics/0.log" Feb 02 11:31:02 crc kubenswrapper[4838]: I0202 11:31:02.932460 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-bggr6_eac05e66-32ef-40ab-833b-ffdb87e12159/nmstate-operator/0.log" Feb 02 11:31:02 crc kubenswrapper[4838]: I0202 11:31:02.987815 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-nwk6k_b94f73b7-7c9a-4c88-9180-76861894189e/nmstate-webhook/0.log" Feb 02 11:31:28 crc kubenswrapper[4838]: I0202 11:31:28.121749 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-7kdtd_b92f99a9-4bbd-4f01-9b6e-8cb3e0fab6b4/kube-rbac-proxy/0.log" Feb 02 11:31:28 crc kubenswrapper[4838]: I0202 11:31:28.163899 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-7kdtd_b92f99a9-4bbd-4f01-9b6e-8cb3e0fab6b4/controller/0.log" Feb 02 11:31:28 crc kubenswrapper[4838]: I0202 11:31:28.293384 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/cp-frr-files/0.log" Feb 02 11:31:28 crc kubenswrapper[4838]: I0202 11:31:28.498917 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/cp-reloader/0.log" Feb 02 11:31:28 crc kubenswrapper[4838]: I0202 11:31:28.504534 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/cp-frr-files/0.log" Feb 02 11:31:28 crc kubenswrapper[4838]: I0202 11:31:28.545540 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/cp-metrics/0.log" Feb 02 11:31:28 crc kubenswrapper[4838]: I0202 11:31:28.565168 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/cp-reloader/0.log" Feb 02 11:31:28 crc kubenswrapper[4838]: I0202 11:31:28.675860 4838 log.go:25] 
"Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/cp-reloader/0.log" Feb 02 11:31:28 crc kubenswrapper[4838]: I0202 11:31:28.680200 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/cp-frr-files/0.log" Feb 02 11:31:28 crc kubenswrapper[4838]: I0202 11:31:28.725337 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/cp-metrics/0.log" Feb 02 11:31:28 crc kubenswrapper[4838]: I0202 11:31:28.727571 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/cp-metrics/0.log" Feb 02 11:31:28 crc kubenswrapper[4838]: I0202 11:31:28.920288 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/cp-reloader/0.log" Feb 02 11:31:28 crc kubenswrapper[4838]: I0202 11:31:28.939440 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/cp-frr-files/0.log" Feb 02 11:31:28 crc kubenswrapper[4838]: I0202 11:31:28.947108 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/cp-metrics/0.log" Feb 02 11:31:28 crc kubenswrapper[4838]: I0202 11:31:28.986266 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/controller/0.log" Feb 02 11:31:29 crc kubenswrapper[4838]: I0202 11:31:29.118441 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/kube-rbac-proxy/0.log" Feb 02 11:31:29 crc kubenswrapper[4838]: I0202 11:31:29.137341 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/frr-metrics/0.log" Feb 02 11:31:29 crc kubenswrapper[4838]: I0202 11:31:29.206210 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/kube-rbac-proxy-frr/0.log" Feb 02 11:31:29 crc kubenswrapper[4838]: I0202 11:31:29.354441 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/reloader/0.log" Feb 02 11:31:29 crc kubenswrapper[4838]: I0202 11:31:29.428994 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-czj6m_80562d51-943d-4213-abbd-099b4e891ce9/frr-k8s-webhook-server/0.log" Feb 02 11:31:29 crc kubenswrapper[4838]: I0202 11:31:29.632594 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-67dbdd759f-klfvs_734aaa76-0e63-4bf4-9b2d-60a0346dfcac/manager/0.log" Feb 02 11:31:29 crc kubenswrapper[4838]: I0202 11:31:29.779313 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-b56888666-82h5d_ffd0594c-3abc-4b1a-89e4-0face9bad35f/webhook-server/0.log" Feb 02 11:31:29 crc kubenswrapper[4838]: I0202 11:31:29.840194 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-pf2fp_306c1a4f-3d28-4cc9-91bd-a78c25803845/kube-rbac-proxy/0.log" Feb 02 11:31:30 crc kubenswrapper[4838]: I0202 11:31:30.344242 4838 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/frr/0.log" Feb 02 11:31:30 crc kubenswrapper[4838]: I0202 11:31:30.456498 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-pf2fp_306c1a4f-3d28-4cc9-91bd-a78c25803845/speaker/0.log" Feb 02 11:31:42 crc kubenswrapper[4838]: I0202 11:31:42.524282 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g_5d5d76da-96c7-47aa-aeb7-e176ab3b89d3/util/0.log" Feb 02 11:31:42 crc kubenswrapper[4838]: I0202 11:31:42.702453 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g_5d5d76da-96c7-47aa-aeb7-e176ab3b89d3/util/0.log" Feb 02 11:31:42 crc kubenswrapper[4838]: I0202 11:31:42.750772 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g_5d5d76da-96c7-47aa-aeb7-e176ab3b89d3/pull/0.log" Feb 02 11:31:42 crc kubenswrapper[4838]: I0202 11:31:42.756920 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g_5d5d76da-96c7-47aa-aeb7-e176ab3b89d3/pull/0.log" Feb 02 11:31:42 crc kubenswrapper[4838]: I0202 11:31:42.960374 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g_5d5d76da-96c7-47aa-aeb7-e176ab3b89d3/extract/0.log" Feb 02 11:31:42 crc kubenswrapper[4838]: I0202 11:31:42.968313 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g_5d5d76da-96c7-47aa-aeb7-e176ab3b89d3/util/0.log" Feb 02 11:31:42 crc kubenswrapper[4838]: I0202 11:31:42.979311 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g_5d5d76da-96c7-47aa-aeb7-e176ab3b89d3/pull/0.log" Feb 02 11:31:43 crc kubenswrapper[4838]: I0202 11:31:43.122275 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl_a250321d-30c9-426a-b638-90dd5e9c036d/util/0.log" Feb 02 11:31:43 crc kubenswrapper[4838]: I0202 11:31:43.295731 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl_a250321d-30c9-426a-b638-90dd5e9c036d/util/0.log" Feb 02 11:31:43 crc kubenswrapper[4838]: I0202 11:31:43.306420 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl_a250321d-30c9-426a-b638-90dd5e9c036d/pull/0.log" Feb 02 11:31:43 crc kubenswrapper[4838]: I0202 11:31:43.323114 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl_a250321d-30c9-426a-b638-90dd5e9c036d/pull/0.log" Feb 02 11:31:43 crc kubenswrapper[4838]: I0202 11:31:43.474792 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl_a250321d-30c9-426a-b638-90dd5e9c036d/util/0.log" Feb 02 11:31:43 crc kubenswrapper[4838]: I0202 11:31:43.490766 4838 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl_a250321d-30c9-426a-b638-90dd5e9c036d/pull/0.log" Feb 02 11:31:43 crc kubenswrapper[4838]: I0202 11:31:43.493538 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl_a250321d-30c9-426a-b638-90dd5e9c036d/extract/0.log" Feb 02 11:31:43 crc kubenswrapper[4838]: I0202 11:31:43.634751 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tdj7s_6da618e6-95eb-478d-a290-fb44dfef06f7/extract-utilities/0.log" Feb 02 11:31:43 crc kubenswrapper[4838]: I0202 11:31:43.808836 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tdj7s_6da618e6-95eb-478d-a290-fb44dfef06f7/extract-utilities/0.log" Feb 02 11:31:43 crc kubenswrapper[4838]: I0202 11:31:43.825404 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tdj7s_6da618e6-95eb-478d-a290-fb44dfef06f7/extract-content/0.log" Feb 02 11:31:43 crc kubenswrapper[4838]: I0202 11:31:43.855178 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tdj7s_6da618e6-95eb-478d-a290-fb44dfef06f7/extract-content/0.log" Feb 02 11:31:44 crc kubenswrapper[4838]: I0202 11:31:44.025403 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tdj7s_6da618e6-95eb-478d-a290-fb44dfef06f7/extract-content/0.log" Feb 02 11:31:44 crc kubenswrapper[4838]: I0202 11:31:44.030578 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tdj7s_6da618e6-95eb-478d-a290-fb44dfef06f7/extract-utilities/0.log" Feb 02 11:31:44 crc kubenswrapper[4838]: I0202 11:31:44.267904 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dcrh2_8d8db2f1-c01c-4848-923a-f4fb42f7d2be/extract-utilities/0.log" Feb 02 11:31:44 crc kubenswrapper[4838]: I0202 11:31:44.406769 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tdj7s_6da618e6-95eb-478d-a290-fb44dfef06f7/registry-server/0.log" Feb 02 11:31:44 crc kubenswrapper[4838]: I0202 11:31:44.454641 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dcrh2_8d8db2f1-c01c-4848-923a-f4fb42f7d2be/extract-content/0.log" Feb 02 11:31:44 crc kubenswrapper[4838]: I0202 11:31:44.471222 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dcrh2_8d8db2f1-c01c-4848-923a-f4fb42f7d2be/extract-utilities/0.log" Feb 02 11:31:44 crc kubenswrapper[4838]: I0202 11:31:44.501980 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dcrh2_8d8db2f1-c01c-4848-923a-f4fb42f7d2be/extract-content/0.log" Feb 02 11:31:44 crc kubenswrapper[4838]: I0202 11:31:44.626528 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dcrh2_8d8db2f1-c01c-4848-923a-f4fb42f7d2be/extract-utilities/0.log" Feb 02 11:31:44 crc kubenswrapper[4838]: I0202 11:31:44.637369 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dcrh2_8d8db2f1-c01c-4848-923a-f4fb42f7d2be/extract-content/0.log" Feb 02 11:31:44 crc kubenswrapper[4838]: I0202 
11:31:44.837012 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-2kxfd_5214b35a-0948-41f6-b2d1-0dfc43009812/marketplace-operator/0.log" Feb 02 11:31:45 crc kubenswrapper[4838]: I0202 11:31:45.022552 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2f7cb_d5cef155-b12d-4e9b-81b7-9a224b8fe5b3/extract-utilities/0.log" Feb 02 11:31:45 crc kubenswrapper[4838]: I0202 11:31:45.050019 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dcrh2_8d8db2f1-c01c-4848-923a-f4fb42f7d2be/registry-server/0.log" Feb 02 11:31:45 crc kubenswrapper[4838]: I0202 11:31:45.139499 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2f7cb_d5cef155-b12d-4e9b-81b7-9a224b8fe5b3/extract-utilities/0.log" Feb 02 11:31:45 crc kubenswrapper[4838]: I0202 11:31:45.139508 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2f7cb_d5cef155-b12d-4e9b-81b7-9a224b8fe5b3/extract-content/0.log" Feb 02 11:31:45 crc kubenswrapper[4838]: I0202 11:31:45.201280 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2f7cb_d5cef155-b12d-4e9b-81b7-9a224b8fe5b3/extract-content/0.log" Feb 02 11:31:45 crc kubenswrapper[4838]: I0202 11:31:45.376167 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2f7cb_d5cef155-b12d-4e9b-81b7-9a224b8fe5b3/extract-content/0.log" Feb 02 11:31:45 crc kubenswrapper[4838]: I0202 11:31:45.414233 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2f7cb_d5cef155-b12d-4e9b-81b7-9a224b8fe5b3/extract-utilities/0.log" Feb 02 11:31:45 crc kubenswrapper[4838]: I0202 11:31:45.429729 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 11:31:45 crc kubenswrapper[4838]: I0202 11:31:45.429784 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 11:31:45 crc kubenswrapper[4838]: I0202 11:31:45.520517 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2f7cb_d5cef155-b12d-4e9b-81b7-9a224b8fe5b3/registry-server/0.log" Feb 02 11:31:45 crc kubenswrapper[4838]: I0202 11:31:45.594007 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-62q4t_71ec42fd-382d-43bd-9353-24e15ac2e795/extract-utilities/0.log" Feb 02 11:31:45 crc kubenswrapper[4838]: I0202 11:31:45.717034 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-62q4t_71ec42fd-382d-43bd-9353-24e15ac2e795/extract-utilities/0.log" Feb 02 11:31:45 crc kubenswrapper[4838]: I0202 11:31:45.722634 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-62q4t_71ec42fd-382d-43bd-9353-24e15ac2e795/extract-content/0.log" Feb 02 11:31:45 crc 
kubenswrapper[4838]: I0202 11:31:45.770271 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-62q4t_71ec42fd-382d-43bd-9353-24e15ac2e795/extract-content/0.log" Feb 02 11:31:45 crc kubenswrapper[4838]: I0202 11:31:45.943010 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-62q4t_71ec42fd-382d-43bd-9353-24e15ac2e795/extract-content/0.log" Feb 02 11:31:45 crc kubenswrapper[4838]: I0202 11:31:45.994736 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-62q4t_71ec42fd-382d-43bd-9353-24e15ac2e795/extract-utilities/0.log" Feb 02 11:31:46 crc kubenswrapper[4838]: I0202 11:31:46.362668 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-62q4t_71ec42fd-382d-43bd-9353-24e15ac2e795/registry-server/0.log" Feb 02 11:32:12 crc kubenswrapper[4838]: E0202 11:32:12.171197 4838 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.241:50628->38.102.83.241:43415: write tcp 38.102.83.241:50628->38.102.83.241:43415: write: broken pipe Feb 02 11:32:15 crc kubenswrapper[4838]: I0202 11:32:15.429329 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 11:32:15 crc kubenswrapper[4838]: I0202 11:32:15.429685 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 11:32:45 crc kubenswrapper[4838]: I0202 11:32:45.430147 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 11:32:45 crc kubenswrapper[4838]: I0202 11:32:45.430770 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 11:32:45 crc kubenswrapper[4838]: I0202 11:32:45.430856 4838 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" Feb 02 11:32:45 crc kubenswrapper[4838]: I0202 11:32:45.432228 4838 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980"} pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 11:32:45 crc kubenswrapper[4838]: I0202 11:32:45.432354 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" 
podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" containerID="cri-o://fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980" gracePeriod=600 Feb 02 11:32:45 crc kubenswrapper[4838]: E0202 11:32:45.582149 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:32:46 crc kubenswrapper[4838]: I0202 11:32:46.199365 4838 generic.go:334] "Generic (PLEG): container finished" podID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980" exitCode=0 Feb 02 11:32:46 crc kubenswrapper[4838]: I0202 11:32:46.199497 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" event={"ID":"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce","Type":"ContainerDied","Data":"fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980"} Feb 02 11:32:46 crc kubenswrapper[4838]: I0202 11:32:46.199556 4838 scope.go:117] "RemoveContainer" containerID="175ffe8c2785ccb5a4bb5ad4bb28adaa432562c8282f7529b47cf5fe3050f259" Feb 02 11:32:46 crc kubenswrapper[4838]: I0202 11:32:46.204123 4838 scope.go:117] "RemoveContainer" containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980" Feb 02 11:32:46 crc kubenswrapper[4838]: E0202 11:32:46.204493 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:32:57 crc kubenswrapper[4838]: I0202 11:32:57.506965 4838 scope.go:117] "RemoveContainer" containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980" Feb 02 11:32:57 crc kubenswrapper[4838]: E0202 11:32:57.511432 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:33:09 crc kubenswrapper[4838]: I0202 11:33:09.506030 4838 scope.go:117] "RemoveContainer" containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980" Feb 02 11:33:09 crc kubenswrapper[4838]: E0202 11:33:09.506972 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:33:21 crc kubenswrapper[4838]: I0202 11:33:21.505948 4838 scope.go:117] 
"RemoveContainer" containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980" Feb 02 11:33:21 crc kubenswrapper[4838]: E0202 11:33:21.506792 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:33:25 crc kubenswrapper[4838]: I0202 11:33:25.558501 4838 generic.go:334] "Generic (PLEG): container finished" podID="abf3d322-df58-43d2-bd16-302cd0c158bc" containerID="1a6797b317718ed507b3bf849cb2fc36665096839e4c76adee307ce96fe89986" exitCode=0 Feb 02 11:33:25 crc kubenswrapper[4838]: I0202 11:33:25.558577 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wjz54/must-gather-29j7m" event={"ID":"abf3d322-df58-43d2-bd16-302cd0c158bc","Type":"ContainerDied","Data":"1a6797b317718ed507b3bf849cb2fc36665096839e4c76adee307ce96fe89986"} Feb 02 11:33:25 crc kubenswrapper[4838]: I0202 11:33:25.559576 4838 scope.go:117] "RemoveContainer" containerID="1a6797b317718ed507b3bf849cb2fc36665096839e4c76adee307ce96fe89986" Feb 02 11:33:25 crc kubenswrapper[4838]: I0202 11:33:25.693779 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wjz54_must-gather-29j7m_abf3d322-df58-43d2-bd16-302cd0c158bc/gather/0.log" Feb 02 11:33:33 crc kubenswrapper[4838]: I0202 11:33:33.506336 4838 scope.go:117] "RemoveContainer" containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980" Feb 02 11:33:33 crc kubenswrapper[4838]: E0202 11:33:33.507131 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:33:33 crc kubenswrapper[4838]: I0202 11:33:33.684141 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wjz54/must-gather-29j7m"] Feb 02 11:33:33 crc kubenswrapper[4838]: I0202 11:33:33.684442 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-wjz54/must-gather-29j7m" podUID="abf3d322-df58-43d2-bd16-302cd0c158bc" containerName="copy" containerID="cri-o://1fb78069fad3acd5b8e9446eb004f3c493dbe6fe08ab7e9bee93b182ebdf5dbf" gracePeriod=2 Feb 02 11:33:33 crc kubenswrapper[4838]: I0202 11:33:33.693779 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wjz54/must-gather-29j7m"] Feb 02 11:33:34 crc kubenswrapper[4838]: I0202 11:33:34.109782 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wjz54_must-gather-29j7m_abf3d322-df58-43d2-bd16-302cd0c158bc/copy/0.log" Feb 02 11:33:34 crc kubenswrapper[4838]: I0202 11:33:34.110484 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wjz54/must-gather-29j7m" Feb 02 11:33:34 crc kubenswrapper[4838]: I0202 11:33:34.273905 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/abf3d322-df58-43d2-bd16-302cd0c158bc-must-gather-output\") pod \"abf3d322-df58-43d2-bd16-302cd0c158bc\" (UID: \"abf3d322-df58-43d2-bd16-302cd0c158bc\") " Feb 02 11:33:34 crc kubenswrapper[4838]: I0202 11:33:34.274054 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2mlmt\" (UniqueName: \"kubernetes.io/projected/abf3d322-df58-43d2-bd16-302cd0c158bc-kube-api-access-2mlmt\") pod \"abf3d322-df58-43d2-bd16-302cd0c158bc\" (UID: \"abf3d322-df58-43d2-bd16-302cd0c158bc\") " Feb 02 11:33:34 crc kubenswrapper[4838]: I0202 11:33:34.281388 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abf3d322-df58-43d2-bd16-302cd0c158bc-kube-api-access-2mlmt" (OuterVolumeSpecName: "kube-api-access-2mlmt") pod "abf3d322-df58-43d2-bd16-302cd0c158bc" (UID: "abf3d322-df58-43d2-bd16-302cd0c158bc"). InnerVolumeSpecName "kube-api-access-2mlmt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:33:34 crc kubenswrapper[4838]: I0202 11:33:34.376923 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2mlmt\" (UniqueName: \"kubernetes.io/projected/abf3d322-df58-43d2-bd16-302cd0c158bc-kube-api-access-2mlmt\") on node \"crc\" DevicePath \"\"" Feb 02 11:33:34 crc kubenswrapper[4838]: I0202 11:33:34.424939 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abf3d322-df58-43d2-bd16-302cd0c158bc-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "abf3d322-df58-43d2-bd16-302cd0c158bc" (UID: "abf3d322-df58-43d2-bd16-302cd0c158bc"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:33:34 crc kubenswrapper[4838]: I0202 11:33:34.479586 4838 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/abf3d322-df58-43d2-bd16-302cd0c158bc-must-gather-output\") on node \"crc\" DevicePath \"\"" Feb 02 11:33:34 crc kubenswrapper[4838]: I0202 11:33:34.518238 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abf3d322-df58-43d2-bd16-302cd0c158bc" path="/var/lib/kubelet/pods/abf3d322-df58-43d2-bd16-302cd0c158bc/volumes" Feb 02 11:33:34 crc kubenswrapper[4838]: I0202 11:33:34.639385 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wjz54_must-gather-29j7m_abf3d322-df58-43d2-bd16-302cd0c158bc/copy/0.log" Feb 02 11:33:34 crc kubenswrapper[4838]: I0202 11:33:34.639887 4838 generic.go:334] "Generic (PLEG): container finished" podID="abf3d322-df58-43d2-bd16-302cd0c158bc" containerID="1fb78069fad3acd5b8e9446eb004f3c493dbe6fe08ab7e9bee93b182ebdf5dbf" exitCode=143 Feb 02 11:33:34 crc kubenswrapper[4838]: I0202 11:33:34.639947 4838 scope.go:117] "RemoveContainer" containerID="1fb78069fad3acd5b8e9446eb004f3c493dbe6fe08ab7e9bee93b182ebdf5dbf" Feb 02 11:33:34 crc kubenswrapper[4838]: I0202 11:33:34.639974 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wjz54/must-gather-29j7m" Feb 02 11:33:34 crc kubenswrapper[4838]: I0202 11:33:34.661985 4838 scope.go:117] "RemoveContainer" containerID="1a6797b317718ed507b3bf849cb2fc36665096839e4c76adee307ce96fe89986" Feb 02 11:33:34 crc kubenswrapper[4838]: I0202 11:33:34.738419 4838 scope.go:117] "RemoveContainer" containerID="1fb78069fad3acd5b8e9446eb004f3c493dbe6fe08ab7e9bee93b182ebdf5dbf" Feb 02 11:33:34 crc kubenswrapper[4838]: E0202 11:33:34.739283 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1fb78069fad3acd5b8e9446eb004f3c493dbe6fe08ab7e9bee93b182ebdf5dbf\": container with ID starting with 1fb78069fad3acd5b8e9446eb004f3c493dbe6fe08ab7e9bee93b182ebdf5dbf not found: ID does not exist" containerID="1fb78069fad3acd5b8e9446eb004f3c493dbe6fe08ab7e9bee93b182ebdf5dbf" Feb 02 11:33:34 crc kubenswrapper[4838]: I0202 11:33:34.739382 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fb78069fad3acd5b8e9446eb004f3c493dbe6fe08ab7e9bee93b182ebdf5dbf"} err="failed to get container status \"1fb78069fad3acd5b8e9446eb004f3c493dbe6fe08ab7e9bee93b182ebdf5dbf\": rpc error: code = NotFound desc = could not find container \"1fb78069fad3acd5b8e9446eb004f3c493dbe6fe08ab7e9bee93b182ebdf5dbf\": container with ID starting with 1fb78069fad3acd5b8e9446eb004f3c493dbe6fe08ab7e9bee93b182ebdf5dbf not found: ID does not exist" Feb 02 11:33:34 crc kubenswrapper[4838]: I0202 11:33:34.739458 4838 scope.go:117] "RemoveContainer" containerID="1a6797b317718ed507b3bf849cb2fc36665096839e4c76adee307ce96fe89986" Feb 02 11:33:34 crc kubenswrapper[4838]: E0202 11:33:34.739940 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a6797b317718ed507b3bf849cb2fc36665096839e4c76adee307ce96fe89986\": container with ID starting with 1a6797b317718ed507b3bf849cb2fc36665096839e4c76adee307ce96fe89986 not found: ID does not exist" containerID="1a6797b317718ed507b3bf849cb2fc36665096839e4c76adee307ce96fe89986" Feb 02 11:33:34 crc kubenswrapper[4838]: I0202 11:33:34.739980 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a6797b317718ed507b3bf849cb2fc36665096839e4c76adee307ce96fe89986"} err="failed to get container status \"1a6797b317718ed507b3bf849cb2fc36665096839e4c76adee307ce96fe89986\": rpc error: code = NotFound desc = could not find container \"1a6797b317718ed507b3bf849cb2fc36665096839e4c76adee307ce96fe89986\": container with ID starting with 1a6797b317718ed507b3bf849cb2fc36665096839e4c76adee307ce96fe89986 not found: ID does not exist" Feb 02 11:33:46 crc kubenswrapper[4838]: I0202 11:33:46.512679 4838 scope.go:117] "RemoveContainer" containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980" Feb 02 11:33:46 crc kubenswrapper[4838]: E0202 11:33:46.513713 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:33:57 crc kubenswrapper[4838]: I0202 11:33:57.512993 4838 scope.go:117] "RemoveContainer" 
containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980" Feb 02 11:33:57 crc kubenswrapper[4838]: E0202 11:33:57.513925 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:34:08 crc kubenswrapper[4838]: I0202 11:34:08.506577 4838 scope.go:117] "RemoveContainer" containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980" Feb 02 11:34:08 crc kubenswrapper[4838]: E0202 11:34:08.507422 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:34:21 crc kubenswrapper[4838]: I0202 11:34:21.506242 4838 scope.go:117] "RemoveContainer" containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980" Feb 02 11:34:21 crc kubenswrapper[4838]: E0202 11:34:21.507079 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:34:32 crc kubenswrapper[4838]: I0202 11:34:32.507355 4838 scope.go:117] "RemoveContainer" containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980" Feb 02 11:34:32 crc kubenswrapper[4838]: E0202 11:34:32.508238 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:34:47 crc kubenswrapper[4838]: I0202 11:34:47.506264 4838 scope.go:117] "RemoveContainer" containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980" Feb 02 11:34:47 crc kubenswrapper[4838]: E0202 11:34:47.507153 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:34:49 crc kubenswrapper[4838]: I0202 11:34:49.893054 4838 scope.go:117] "RemoveContainer" containerID="ff42c1290b1ede43136cc4c470a465a0ba7fd915cca466e05db3fb5f398e2aa2" Feb 02 11:35:00 crc kubenswrapper[4838]: I0202 11:35:00.506596 4838 scope.go:117] "RemoveContainer" 
containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980" Feb 02 11:35:00 crc kubenswrapper[4838]: E0202 11:35:00.507716 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:35:11 crc kubenswrapper[4838]: I0202 11:35:11.506304 4838 scope.go:117] "RemoveContainer" containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980" Feb 02 11:35:11 crc kubenswrapper[4838]: E0202 11:35:11.507049 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:35:22 crc kubenswrapper[4838]: I0202 11:35:22.506484 4838 scope.go:117] "RemoveContainer" containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980" Feb 02 11:35:22 crc kubenswrapper[4838]: E0202 11:35:22.507342 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:35:36 crc kubenswrapper[4838]: I0202 11:35:36.513036 4838 scope.go:117] "RemoveContainer" containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980" Feb 02 11:35:36 crc kubenswrapper[4838]: E0202 11:35:36.513856 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:35:47 crc kubenswrapper[4838]: I0202 11:35:47.505882 4838 scope.go:117] "RemoveContainer" containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980" Feb 02 11:35:47 crc kubenswrapper[4838]: E0202 11:35:47.506725 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:36:00 crc kubenswrapper[4838]: I0202 11:36:00.506604 4838 scope.go:117] "RemoveContainer" containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980" Feb 02 11:36:00 crc kubenswrapper[4838]: E0202 11:36:00.507860 4838 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:36:14 crc kubenswrapper[4838]: I0202 11:36:14.506903 4838 scope.go:117] "RemoveContainer" containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980" Feb 02 11:36:14 crc kubenswrapper[4838]: E0202 11:36:14.507820 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:36:25 crc kubenswrapper[4838]: I0202 11:36:25.416958 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-cs9pd/must-gather-9qprb"] Feb 02 11:36:25 crc kubenswrapper[4838]: E0202 11:36:25.417788 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abf3d322-df58-43d2-bd16-302cd0c158bc" containerName="gather" Feb 02 11:36:25 crc kubenswrapper[4838]: I0202 11:36:25.417800 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="abf3d322-df58-43d2-bd16-302cd0c158bc" containerName="gather" Feb 02 11:36:25 crc kubenswrapper[4838]: E0202 11:36:25.417812 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d35866d-90e1-4309-ab96-a1135cd809b6" containerName="extract-content" Feb 02 11:36:25 crc kubenswrapper[4838]: I0202 11:36:25.417818 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d35866d-90e1-4309-ab96-a1135cd809b6" containerName="extract-content" Feb 02 11:36:25 crc kubenswrapper[4838]: E0202 11:36:25.417832 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d35866d-90e1-4309-ab96-a1135cd809b6" containerName="extract-utilities" Feb 02 11:36:25 crc kubenswrapper[4838]: I0202 11:36:25.417838 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d35866d-90e1-4309-ab96-a1135cd809b6" containerName="extract-utilities" Feb 02 11:36:25 crc kubenswrapper[4838]: E0202 11:36:25.417852 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abf3d322-df58-43d2-bd16-302cd0c158bc" containerName="copy" Feb 02 11:36:25 crc kubenswrapper[4838]: I0202 11:36:25.417859 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="abf3d322-df58-43d2-bd16-302cd0c158bc" containerName="copy" Feb 02 11:36:25 crc kubenswrapper[4838]: E0202 11:36:25.417873 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5711aae2-4e88-4dcc-8736-0fb7ca22aef8" containerName="extract-utilities" Feb 02 11:36:25 crc kubenswrapper[4838]: I0202 11:36:25.417878 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="5711aae2-4e88-4dcc-8736-0fb7ca22aef8" containerName="extract-utilities" Feb 02 11:36:25 crc kubenswrapper[4838]: E0202 11:36:25.417892 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d35866d-90e1-4309-ab96-a1135cd809b6" containerName="registry-server" Feb 02 11:36:25 crc kubenswrapper[4838]: I0202 11:36:25.417899 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d35866d-90e1-4309-ab96-a1135cd809b6" 
containerName="registry-server" Feb 02 11:36:25 crc kubenswrapper[4838]: E0202 11:36:25.417922 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5711aae2-4e88-4dcc-8736-0fb7ca22aef8" containerName="extract-content" Feb 02 11:36:25 crc kubenswrapper[4838]: I0202 11:36:25.417929 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="5711aae2-4e88-4dcc-8736-0fb7ca22aef8" containerName="extract-content" Feb 02 11:36:25 crc kubenswrapper[4838]: E0202 11:36:25.417942 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5711aae2-4e88-4dcc-8736-0fb7ca22aef8" containerName="registry-server" Feb 02 11:36:25 crc kubenswrapper[4838]: I0202 11:36:25.417948 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="5711aae2-4e88-4dcc-8736-0fb7ca22aef8" containerName="registry-server" Feb 02 11:36:25 crc kubenswrapper[4838]: I0202 11:36:25.418133 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d35866d-90e1-4309-ab96-a1135cd809b6" containerName="registry-server" Feb 02 11:36:25 crc kubenswrapper[4838]: I0202 11:36:25.418153 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="5711aae2-4e88-4dcc-8736-0fb7ca22aef8" containerName="registry-server" Feb 02 11:36:25 crc kubenswrapper[4838]: I0202 11:36:25.418163 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="abf3d322-df58-43d2-bd16-302cd0c158bc" containerName="copy" Feb 02 11:36:25 crc kubenswrapper[4838]: I0202 11:36:25.418173 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="abf3d322-df58-43d2-bd16-302cd0c158bc" containerName="gather" Feb 02 11:36:25 crc kubenswrapper[4838]: I0202 11:36:25.419087 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cs9pd/must-gather-9qprb" Feb 02 11:36:25 crc kubenswrapper[4838]: I0202 11:36:25.423124 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-cs9pd"/"kube-root-ca.crt" Feb 02 11:36:25 crc kubenswrapper[4838]: I0202 11:36:25.423311 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-cs9pd"/"openshift-service-ca.crt" Feb 02 11:36:25 crc kubenswrapper[4838]: I0202 11:36:25.425016 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-cs9pd/must-gather-9qprb"] Feb 02 11:36:25 crc kubenswrapper[4838]: I0202 11:36:25.567942 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ddff32a7-e027-4d2e-b77f-c7053ae36c59-must-gather-output\") pod \"must-gather-9qprb\" (UID: \"ddff32a7-e027-4d2e-b77f-c7053ae36c59\") " pod="openshift-must-gather-cs9pd/must-gather-9qprb" Feb 02 11:36:25 crc kubenswrapper[4838]: I0202 11:36:25.568291 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkvn9\" (UniqueName: \"kubernetes.io/projected/ddff32a7-e027-4d2e-b77f-c7053ae36c59-kube-api-access-pkvn9\") pod \"must-gather-9qprb\" (UID: \"ddff32a7-e027-4d2e-b77f-c7053ae36c59\") " pod="openshift-must-gather-cs9pd/must-gather-9qprb" Feb 02 11:36:25 crc kubenswrapper[4838]: I0202 11:36:25.669950 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ddff32a7-e027-4d2e-b77f-c7053ae36c59-must-gather-output\") pod \"must-gather-9qprb\" (UID: \"ddff32a7-e027-4d2e-b77f-c7053ae36c59\") " 
pod="openshift-must-gather-cs9pd/must-gather-9qprb" Feb 02 11:36:25 crc kubenswrapper[4838]: I0202 11:36:25.670105 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkvn9\" (UniqueName: \"kubernetes.io/projected/ddff32a7-e027-4d2e-b77f-c7053ae36c59-kube-api-access-pkvn9\") pod \"must-gather-9qprb\" (UID: \"ddff32a7-e027-4d2e-b77f-c7053ae36c59\") " pod="openshift-must-gather-cs9pd/must-gather-9qprb" Feb 02 11:36:25 crc kubenswrapper[4838]: I0202 11:36:25.670361 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ddff32a7-e027-4d2e-b77f-c7053ae36c59-must-gather-output\") pod \"must-gather-9qprb\" (UID: \"ddff32a7-e027-4d2e-b77f-c7053ae36c59\") " pod="openshift-must-gather-cs9pd/must-gather-9qprb" Feb 02 11:36:25 crc kubenswrapper[4838]: I0202 11:36:25.690083 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pkvn9\" (UniqueName: \"kubernetes.io/projected/ddff32a7-e027-4d2e-b77f-c7053ae36c59-kube-api-access-pkvn9\") pod \"must-gather-9qprb\" (UID: \"ddff32a7-e027-4d2e-b77f-c7053ae36c59\") " pod="openshift-must-gather-cs9pd/must-gather-9qprb" Feb 02 11:36:25 crc kubenswrapper[4838]: I0202 11:36:25.743190 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cs9pd/must-gather-9qprb" Feb 02 11:36:26 crc kubenswrapper[4838]: I0202 11:36:26.227465 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-cs9pd/must-gather-9qprb"] Feb 02 11:36:26 crc kubenswrapper[4838]: W0202 11:36:26.228778 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podddff32a7_e027_4d2e_b77f_c7053ae36c59.slice/crio-754f867080c7db24e845e5b04ccdc66e8984f72dea72358effa851cd798454a7 WatchSource:0}: Error finding container 754f867080c7db24e845e5b04ccdc66e8984f72dea72358effa851cd798454a7: Status 404 returned error can't find the container with id 754f867080c7db24e845e5b04ccdc66e8984f72dea72358effa851cd798454a7 Feb 02 11:36:26 crc kubenswrapper[4838]: I0202 11:36:26.528073 4838 scope.go:117] "RemoveContainer" containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980" Feb 02 11:36:26 crc kubenswrapper[4838]: E0202 11:36:26.528789 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:36:27 crc kubenswrapper[4838]: I0202 11:36:27.212939 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cs9pd/must-gather-9qprb" event={"ID":"ddff32a7-e027-4d2e-b77f-c7053ae36c59","Type":"ContainerStarted","Data":"13e93b89d3635aa8b45d0fa03b67c813f43b738aa898556bfca86c4416446edd"} Feb 02 11:36:27 crc kubenswrapper[4838]: I0202 11:36:27.213500 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cs9pd/must-gather-9qprb" event={"ID":"ddff32a7-e027-4d2e-b77f-c7053ae36c59","Type":"ContainerStarted","Data":"51815ca6efcdba12f8e9d68a2e89ffee60df54c9aeb03a1f751a9fbea7403fbf"} Feb 02 11:36:27 crc kubenswrapper[4838]: I0202 11:36:27.213538 4838 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-must-gather-cs9pd/must-gather-9qprb" event={"ID":"ddff32a7-e027-4d2e-b77f-c7053ae36c59","Type":"ContainerStarted","Data":"754f867080c7db24e845e5b04ccdc66e8984f72dea72358effa851cd798454a7"} Feb 02 11:36:27 crc kubenswrapper[4838]: I0202 11:36:27.232884 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-cs9pd/must-gather-9qprb" podStartSLOduration=2.232862787 podStartE2EDuration="2.232862787s" podCreationTimestamp="2026-02-02 11:36:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:36:27.231065308 +0000 UTC m=+2581.568166346" watchObservedRunningTime="2026-02-02 11:36:27.232862787 +0000 UTC m=+2581.569963815" Feb 02 11:36:30 crc kubenswrapper[4838]: I0202 11:36:30.296727 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-cs9pd/crc-debug-wjtnj"] Feb 02 11:36:30 crc kubenswrapper[4838]: I0202 11:36:30.298742 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cs9pd/crc-debug-wjtnj" Feb 02 11:36:30 crc kubenswrapper[4838]: I0202 11:36:30.300827 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-cs9pd"/"default-dockercfg-dszrx" Feb 02 11:36:30 crc kubenswrapper[4838]: I0202 11:36:30.461668 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e11deddf-d988-4955-bbb4-3141a6cc946d-host\") pod \"crc-debug-wjtnj\" (UID: \"e11deddf-d988-4955-bbb4-3141a6cc946d\") " pod="openshift-must-gather-cs9pd/crc-debug-wjtnj" Feb 02 11:36:30 crc kubenswrapper[4838]: I0202 11:36:30.461723 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsllp\" (UniqueName: \"kubernetes.io/projected/e11deddf-d988-4955-bbb4-3141a6cc946d-kube-api-access-fsllp\") pod \"crc-debug-wjtnj\" (UID: \"e11deddf-d988-4955-bbb4-3141a6cc946d\") " pod="openshift-must-gather-cs9pd/crc-debug-wjtnj" Feb 02 11:36:30 crc kubenswrapper[4838]: I0202 11:36:30.563650 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e11deddf-d988-4955-bbb4-3141a6cc946d-host\") pod \"crc-debug-wjtnj\" (UID: \"e11deddf-d988-4955-bbb4-3141a6cc946d\") " pod="openshift-must-gather-cs9pd/crc-debug-wjtnj" Feb 02 11:36:30 crc kubenswrapper[4838]: I0202 11:36:30.563714 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsllp\" (UniqueName: \"kubernetes.io/projected/e11deddf-d988-4955-bbb4-3141a6cc946d-kube-api-access-fsllp\") pod \"crc-debug-wjtnj\" (UID: \"e11deddf-d988-4955-bbb4-3141a6cc946d\") " pod="openshift-must-gather-cs9pd/crc-debug-wjtnj" Feb 02 11:36:30 crc kubenswrapper[4838]: I0202 11:36:30.563822 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e11deddf-d988-4955-bbb4-3141a6cc946d-host\") pod \"crc-debug-wjtnj\" (UID: \"e11deddf-d988-4955-bbb4-3141a6cc946d\") " pod="openshift-must-gather-cs9pd/crc-debug-wjtnj" Feb 02 11:36:30 crc kubenswrapper[4838]: I0202 11:36:30.592600 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsllp\" (UniqueName: \"kubernetes.io/projected/e11deddf-d988-4955-bbb4-3141a6cc946d-kube-api-access-fsllp\") pod \"crc-debug-wjtnj\" (UID: 
\"e11deddf-d988-4955-bbb4-3141a6cc946d\") " pod="openshift-must-gather-cs9pd/crc-debug-wjtnj" Feb 02 11:36:30 crc kubenswrapper[4838]: I0202 11:36:30.615951 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cs9pd/crc-debug-wjtnj" Feb 02 11:36:30 crc kubenswrapper[4838]: W0202 11:36:30.650757 4838 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode11deddf_d988_4955_bbb4_3141a6cc946d.slice/crio-2f2c47fe198a3cae85b284e796676635f22b763657c1edf332a66e769145b42f WatchSource:0}: Error finding container 2f2c47fe198a3cae85b284e796676635f22b763657c1edf332a66e769145b42f: Status 404 returned error can't find the container with id 2f2c47fe198a3cae85b284e796676635f22b763657c1edf332a66e769145b42f Feb 02 11:36:31 crc kubenswrapper[4838]: I0202 11:36:31.242736 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cs9pd/crc-debug-wjtnj" event={"ID":"e11deddf-d988-4955-bbb4-3141a6cc946d","Type":"ContainerStarted","Data":"7ea58378486b0ca0cdf4ba3e7124dd03bcec9ef75d9d04a8641e99dec371ee0e"} Feb 02 11:36:31 crc kubenswrapper[4838]: I0202 11:36:31.243268 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cs9pd/crc-debug-wjtnj" event={"ID":"e11deddf-d988-4955-bbb4-3141a6cc946d","Type":"ContainerStarted","Data":"2f2c47fe198a3cae85b284e796676635f22b763657c1edf332a66e769145b42f"} Feb 02 11:36:31 crc kubenswrapper[4838]: I0202 11:36:31.263221 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-cs9pd/crc-debug-wjtnj" podStartSLOduration=1.263202102 podStartE2EDuration="1.263202102s" podCreationTimestamp="2026-02-02 11:36:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:36:31.255498712 +0000 UTC m=+2585.592599740" watchObservedRunningTime="2026-02-02 11:36:31.263202102 +0000 UTC m=+2585.600303130" Feb 02 11:36:38 crc kubenswrapper[4838]: I0202 11:36:38.506759 4838 scope.go:117] "RemoveContainer" containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980" Feb 02 11:36:38 crc kubenswrapper[4838]: E0202 11:36:38.508633 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:36:49 crc kubenswrapper[4838]: I0202 11:36:49.505785 4838 scope.go:117] "RemoveContainer" containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980" Feb 02 11:36:49 crc kubenswrapper[4838]: E0202 11:36:49.506508 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:37:00 crc kubenswrapper[4838]: I0202 11:37:00.506707 4838 scope.go:117] "RemoveContainer" 
containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980" Feb 02 11:37:00 crc kubenswrapper[4838]: E0202 11:37:00.507496 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:37:03 crc kubenswrapper[4838]: I0202 11:37:03.506385 4838 generic.go:334] "Generic (PLEG): container finished" podID="e11deddf-d988-4955-bbb4-3141a6cc946d" containerID="7ea58378486b0ca0cdf4ba3e7124dd03bcec9ef75d9d04a8641e99dec371ee0e" exitCode=0 Feb 02 11:37:03 crc kubenswrapper[4838]: I0202 11:37:03.506590 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cs9pd/crc-debug-wjtnj" event={"ID":"e11deddf-d988-4955-bbb4-3141a6cc946d","Type":"ContainerDied","Data":"7ea58378486b0ca0cdf4ba3e7124dd03bcec9ef75d9d04a8641e99dec371ee0e"} Feb 02 11:37:04 crc kubenswrapper[4838]: I0202 11:37:04.639378 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cs9pd/crc-debug-wjtnj" Feb 02 11:37:04 crc kubenswrapper[4838]: I0202 11:37:04.693919 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-cs9pd/crc-debug-wjtnj"] Feb 02 11:37:04 crc kubenswrapper[4838]: I0202 11:37:04.703531 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-cs9pd/crc-debug-wjtnj"] Feb 02 11:37:04 crc kubenswrapper[4838]: I0202 11:37:04.745117 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e11deddf-d988-4955-bbb4-3141a6cc946d-host\") pod \"e11deddf-d988-4955-bbb4-3141a6cc946d\" (UID: \"e11deddf-d988-4955-bbb4-3141a6cc946d\") " Feb 02 11:37:04 crc kubenswrapper[4838]: I0202 11:37:04.745254 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e11deddf-d988-4955-bbb4-3141a6cc946d-host" (OuterVolumeSpecName: "host") pod "e11deddf-d988-4955-bbb4-3141a6cc946d" (UID: "e11deddf-d988-4955-bbb4-3141a6cc946d"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 11:37:04 crc kubenswrapper[4838]: I0202 11:37:04.745407 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fsllp\" (UniqueName: \"kubernetes.io/projected/e11deddf-d988-4955-bbb4-3141a6cc946d-kube-api-access-fsllp\") pod \"e11deddf-d988-4955-bbb4-3141a6cc946d\" (UID: \"e11deddf-d988-4955-bbb4-3141a6cc946d\") " Feb 02 11:37:04 crc kubenswrapper[4838]: I0202 11:37:04.746097 4838 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e11deddf-d988-4955-bbb4-3141a6cc946d-host\") on node \"crc\" DevicePath \"\"" Feb 02 11:37:04 crc kubenswrapper[4838]: I0202 11:37:04.756879 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e11deddf-d988-4955-bbb4-3141a6cc946d-kube-api-access-fsllp" (OuterVolumeSpecName: "kube-api-access-fsllp") pod "e11deddf-d988-4955-bbb4-3141a6cc946d" (UID: "e11deddf-d988-4955-bbb4-3141a6cc946d"). InnerVolumeSpecName "kube-api-access-fsllp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:37:04 crc kubenswrapper[4838]: I0202 11:37:04.848127 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fsllp\" (UniqueName: \"kubernetes.io/projected/e11deddf-d988-4955-bbb4-3141a6cc946d-kube-api-access-fsllp\") on node \"crc\" DevicePath \"\"" Feb 02 11:37:05 crc kubenswrapper[4838]: I0202 11:37:05.527409 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2f2c47fe198a3cae85b284e796676635f22b763657c1edf332a66e769145b42f" Feb 02 11:37:05 crc kubenswrapper[4838]: I0202 11:37:05.527492 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cs9pd/crc-debug-wjtnj" Feb 02 11:37:05 crc kubenswrapper[4838]: I0202 11:37:05.876575 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-cs9pd/crc-debug-bg25k"] Feb 02 11:37:05 crc kubenswrapper[4838]: E0202 11:37:05.877008 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e11deddf-d988-4955-bbb4-3141a6cc946d" containerName="container-00" Feb 02 11:37:05 crc kubenswrapper[4838]: I0202 11:37:05.877022 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="e11deddf-d988-4955-bbb4-3141a6cc946d" containerName="container-00" Feb 02 11:37:05 crc kubenswrapper[4838]: I0202 11:37:05.877192 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="e11deddf-d988-4955-bbb4-3141a6cc946d" containerName="container-00" Feb 02 11:37:05 crc kubenswrapper[4838]: I0202 11:37:05.877793 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cs9pd/crc-debug-bg25k" Feb 02 11:37:05 crc kubenswrapper[4838]: I0202 11:37:05.880591 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-cs9pd"/"default-dockercfg-dszrx" Feb 02 11:37:05 crc kubenswrapper[4838]: I0202 11:37:05.967899 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/df7e45c4-2f73-4470-9cfd-26432f5533de-host\") pod \"crc-debug-bg25k\" (UID: \"df7e45c4-2f73-4470-9cfd-26432f5533de\") " pod="openshift-must-gather-cs9pd/crc-debug-bg25k" Feb 02 11:37:05 crc kubenswrapper[4838]: I0202 11:37:05.968005 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kqzv\" (UniqueName: \"kubernetes.io/projected/df7e45c4-2f73-4470-9cfd-26432f5533de-kube-api-access-7kqzv\") pod \"crc-debug-bg25k\" (UID: \"df7e45c4-2f73-4470-9cfd-26432f5533de\") " pod="openshift-must-gather-cs9pd/crc-debug-bg25k" Feb 02 11:37:06 crc kubenswrapper[4838]: I0202 11:37:06.070378 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/df7e45c4-2f73-4470-9cfd-26432f5533de-host\") pod \"crc-debug-bg25k\" (UID: \"df7e45c4-2f73-4470-9cfd-26432f5533de\") " pod="openshift-must-gather-cs9pd/crc-debug-bg25k" Feb 02 11:37:06 crc kubenswrapper[4838]: I0202 11:37:06.070532 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kqzv\" (UniqueName: \"kubernetes.io/projected/df7e45c4-2f73-4470-9cfd-26432f5533de-kube-api-access-7kqzv\") pod \"crc-debug-bg25k\" (UID: \"df7e45c4-2f73-4470-9cfd-26432f5533de\") " pod="openshift-must-gather-cs9pd/crc-debug-bg25k" Feb 02 11:37:06 crc kubenswrapper[4838]: I0202 11:37:06.070580 4838 operation_generator.go:637] "MountVolume.SetUp 
Feb 02 11:37:06 crc kubenswrapper[4838]: I0202 11:37:06.095716 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kqzv\" (UniqueName: \"kubernetes.io/projected/df7e45c4-2f73-4470-9cfd-26432f5533de-kube-api-access-7kqzv\") pod \"crc-debug-bg25k\" (UID: \"df7e45c4-2f73-4470-9cfd-26432f5533de\") " pod="openshift-must-gather-cs9pd/crc-debug-bg25k"
Feb 02 11:37:06 crc kubenswrapper[4838]: I0202 11:37:06.200093 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cs9pd/crc-debug-bg25k"
Feb 02 11:37:06 crc kubenswrapper[4838]: I0202 11:37:06.538523 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e11deddf-d988-4955-bbb4-3141a6cc946d" path="/var/lib/kubelet/pods/e11deddf-d988-4955-bbb4-3141a6cc946d/volumes"
Feb 02 11:37:06 crc kubenswrapper[4838]: I0202 11:37:06.540010 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cs9pd/crc-debug-bg25k" event={"ID":"df7e45c4-2f73-4470-9cfd-26432f5533de","Type":"ContainerStarted","Data":"8034e4333d5090023a77b8f09981f0820913a210b4e53e58fa08aa208fcde901"}
Feb 02 11:37:06 crc kubenswrapper[4838]: I0202 11:37:06.540059 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cs9pd/crc-debug-bg25k" event={"ID":"df7e45c4-2f73-4470-9cfd-26432f5533de","Type":"ContainerStarted","Data":"dd8935a59c5b13e719874a0124040c09869083797cac5b1e600409f14799688a"}
Feb 02 11:37:06 crc kubenswrapper[4838]: I0202 11:37:06.557906 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-cs9pd/crc-debug-bg25k" podStartSLOduration=1.5578861229999998 podStartE2EDuration="1.557886123s" podCreationTimestamp="2026-02-02 11:37:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 11:37:06.553916935 +0000 UTC m=+2620.891017963" watchObservedRunningTime="2026-02-02 11:37:06.557886123 +0000 UTC m=+2620.894987151"
Feb 02 11:37:07 crc kubenswrapper[4838]: I0202 11:37:07.549018 4838 generic.go:334] "Generic (PLEG): container finished" podID="df7e45c4-2f73-4470-9cfd-26432f5533de" containerID="8034e4333d5090023a77b8f09981f0820913a210b4e53e58fa08aa208fcde901" exitCode=0
Feb 02 11:37:07 crc kubenswrapper[4838]: I0202 11:37:07.549384 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cs9pd/crc-debug-bg25k" event={"ID":"df7e45c4-2f73-4470-9cfd-26432f5533de","Type":"ContainerDied","Data":"8034e4333d5090023a77b8f09981f0820913a210b4e53e58fa08aa208fcde901"}
Feb 02 11:37:08 crc kubenswrapper[4838]: I0202 11:37:08.704789 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cs9pd/crc-debug-bg25k"
Feb 02 11:37:08 crc kubenswrapper[4838]: I0202 11:37:08.739685 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-cs9pd/crc-debug-bg25k"]
Feb 02 11:37:08 crc kubenswrapper[4838]: I0202 11:37:08.755221 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-cs9pd/crc-debug-bg25k"]
Feb 02 11:37:08 crc kubenswrapper[4838]: I0202 11:37:08.827499 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/df7e45c4-2f73-4470-9cfd-26432f5533de-host\") pod \"df7e45c4-2f73-4470-9cfd-26432f5533de\" (UID: \"df7e45c4-2f73-4470-9cfd-26432f5533de\") "
Feb 02 11:37:08 crc kubenswrapper[4838]: I0202 11:37:08.827569 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7kqzv\" (UniqueName: \"kubernetes.io/projected/df7e45c4-2f73-4470-9cfd-26432f5533de-kube-api-access-7kqzv\") pod \"df7e45c4-2f73-4470-9cfd-26432f5533de\" (UID: \"df7e45c4-2f73-4470-9cfd-26432f5533de\") "
Feb 02 11:37:08 crc kubenswrapper[4838]: I0202 11:37:08.827658 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/df7e45c4-2f73-4470-9cfd-26432f5533de-host" (OuterVolumeSpecName: "host") pod "df7e45c4-2f73-4470-9cfd-26432f5533de" (UID: "df7e45c4-2f73-4470-9cfd-26432f5533de"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 02 11:37:08 crc kubenswrapper[4838]: I0202 11:37:08.828359 4838 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/df7e45c4-2f73-4470-9cfd-26432f5533de-host\") on node \"crc\" DevicePath \"\""
Feb 02 11:37:08 crc kubenswrapper[4838]: I0202 11:37:08.832332 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df7e45c4-2f73-4470-9cfd-26432f5533de-kube-api-access-7kqzv" (OuterVolumeSpecName: "kube-api-access-7kqzv") pod "df7e45c4-2f73-4470-9cfd-26432f5533de" (UID: "df7e45c4-2f73-4470-9cfd-26432f5533de"). InnerVolumeSpecName "kube-api-access-7kqzv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:37:08 crc kubenswrapper[4838]: I0202 11:37:08.930005 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7kqzv\" (UniqueName: \"kubernetes.io/projected/df7e45c4-2f73-4470-9cfd-26432f5533de-kube-api-access-7kqzv\") on node \"crc\" DevicePath \"\""
Feb 02 11:37:09 crc kubenswrapper[4838]: I0202 11:37:09.564809 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd8935a59c5b13e719874a0124040c09869083797cac5b1e600409f14799688a"
Feb 02 11:37:09 crc kubenswrapper[4838]: I0202 11:37:09.564884 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cs9pd/crc-debug-bg25k"
Feb 02 11:37:09 crc kubenswrapper[4838]: E0202 11:37:09.741503 4838 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf7e45c4_2f73_4470_9cfd_26432f5533de.slice/crio-dd8935a59c5b13e719874a0124040c09869083797cac5b1e600409f14799688a\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf7e45c4_2f73_4470_9cfd_26432f5533de.slice\": RecentStats: unable to find data in memory cache]"
Feb 02 11:37:09 crc kubenswrapper[4838]: I0202 11:37:09.921222 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-cs9pd/crc-debug-vn4ng"]
Feb 02 11:37:09 crc kubenswrapper[4838]: E0202 11:37:09.921718 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df7e45c4-2f73-4470-9cfd-26432f5533de" containerName="container-00"
Feb 02 11:37:09 crc kubenswrapper[4838]: I0202 11:37:09.921734 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="df7e45c4-2f73-4470-9cfd-26432f5533de" containerName="container-00"
Feb 02 11:37:09 crc kubenswrapper[4838]: I0202 11:37:09.921969 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="df7e45c4-2f73-4470-9cfd-26432f5533de" containerName="container-00"
Feb 02 11:37:09 crc kubenswrapper[4838]: I0202 11:37:09.922759 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cs9pd/crc-debug-vn4ng"
Feb 02 11:37:09 crc kubenswrapper[4838]: I0202 11:37:09.928066 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-cs9pd"/"default-dockercfg-dszrx"
Feb 02 11:37:10 crc kubenswrapper[4838]: I0202 11:37:10.048155 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87c6q\" (UniqueName: \"kubernetes.io/projected/ba4d78ed-26ea-4568-8132-18d473c147b5-kube-api-access-87c6q\") pod \"crc-debug-vn4ng\" (UID: \"ba4d78ed-26ea-4568-8132-18d473c147b5\") " pod="openshift-must-gather-cs9pd/crc-debug-vn4ng"
Feb 02 11:37:10 crc kubenswrapper[4838]: I0202 11:37:10.048287 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ba4d78ed-26ea-4568-8132-18d473c147b5-host\") pod \"crc-debug-vn4ng\" (UID: \"ba4d78ed-26ea-4568-8132-18d473c147b5\") " pod="openshift-must-gather-cs9pd/crc-debug-vn4ng"
Feb 02 11:37:10 crc kubenswrapper[4838]: I0202 11:37:10.150402 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87c6q\" (UniqueName: \"kubernetes.io/projected/ba4d78ed-26ea-4568-8132-18d473c147b5-kube-api-access-87c6q\") pod \"crc-debug-vn4ng\" (UID: \"ba4d78ed-26ea-4568-8132-18d473c147b5\") " pod="openshift-must-gather-cs9pd/crc-debug-vn4ng"
Feb 02 11:37:10 crc kubenswrapper[4838]: I0202 11:37:10.150840 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ba4d78ed-26ea-4568-8132-18d473c147b5-host\") pod \"crc-debug-vn4ng\" (UID: \"ba4d78ed-26ea-4568-8132-18d473c147b5\") " pod="openshift-must-gather-cs9pd/crc-debug-vn4ng"
Feb 02 11:37:10 crc kubenswrapper[4838]: I0202 11:37:10.150902 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ba4d78ed-26ea-4568-8132-18d473c147b5-host\") pod \"crc-debug-vn4ng\" (UID: \"ba4d78ed-26ea-4568-8132-18d473c147b5\") " pod="openshift-must-gather-cs9pd/crc-debug-vn4ng"
Feb 02 11:37:10 crc kubenswrapper[4838]: I0202 11:37:10.168371 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87c6q\" (UniqueName: \"kubernetes.io/projected/ba4d78ed-26ea-4568-8132-18d473c147b5-kube-api-access-87c6q\") pod \"crc-debug-vn4ng\" (UID: \"ba4d78ed-26ea-4568-8132-18d473c147b5\") " pod="openshift-must-gather-cs9pd/crc-debug-vn4ng"
Feb 02 11:37:10 crc kubenswrapper[4838]: I0202 11:37:10.248341 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cs9pd/crc-debug-vn4ng"
Feb 02 11:37:10 crc kubenswrapper[4838]: I0202 11:37:10.525543 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df7e45c4-2f73-4470-9cfd-26432f5533de" path="/var/lib/kubelet/pods/df7e45c4-2f73-4470-9cfd-26432f5533de/volumes"
Feb 02 11:37:10 crc kubenswrapper[4838]: I0202 11:37:10.573747 4838 generic.go:334] "Generic (PLEG): container finished" podID="ba4d78ed-26ea-4568-8132-18d473c147b5" containerID="eb9ba43f848f5e93a04261f4b7a4926e6b7db3908bea3d9d6bb7b6ae8d9ac645" exitCode=0
Feb 02 11:37:10 crc kubenswrapper[4838]: I0202 11:37:10.573790 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cs9pd/crc-debug-vn4ng" event={"ID":"ba4d78ed-26ea-4568-8132-18d473c147b5","Type":"ContainerDied","Data":"eb9ba43f848f5e93a04261f4b7a4926e6b7db3908bea3d9d6bb7b6ae8d9ac645"}
Feb 02 11:37:10 crc kubenswrapper[4838]: I0202 11:37:10.573818 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cs9pd/crc-debug-vn4ng" event={"ID":"ba4d78ed-26ea-4568-8132-18d473c147b5","Type":"ContainerStarted","Data":"0839fadc88989fabea06da8a2014a2f570703b4a3a881fa0031176b74eff7639"}
Feb 02 11:37:10 crc kubenswrapper[4838]: I0202 11:37:10.602389 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-cs9pd/crc-debug-vn4ng"]
Feb 02 11:37:10 crc kubenswrapper[4838]: I0202 11:37:10.609370 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-cs9pd/crc-debug-vn4ng"]
Feb 02 11:37:11 crc kubenswrapper[4838]: I0202 11:37:11.506500 4838 scope.go:117] "RemoveContainer" containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980"
Feb 02 11:37:11 crc kubenswrapper[4838]: E0202 11:37:11.507047 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce"
Feb 02 11:37:11 crc kubenswrapper[4838]: I0202 11:37:11.699448 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cs9pd/crc-debug-vn4ng"
Feb 02 11:37:11 crc kubenswrapper[4838]: I0202 11:37:11.781576 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-87c6q\" (UniqueName: \"kubernetes.io/projected/ba4d78ed-26ea-4568-8132-18d473c147b5-kube-api-access-87c6q\") pod \"ba4d78ed-26ea-4568-8132-18d473c147b5\" (UID: \"ba4d78ed-26ea-4568-8132-18d473c147b5\") "
Feb 02 11:37:11 crc kubenswrapper[4838]: I0202 11:37:11.782077 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ba4d78ed-26ea-4568-8132-18d473c147b5-host\") pod \"ba4d78ed-26ea-4568-8132-18d473c147b5\" (UID: \"ba4d78ed-26ea-4568-8132-18d473c147b5\") "
Feb 02 11:37:11 crc kubenswrapper[4838]: I0202 11:37:11.782175 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ba4d78ed-26ea-4568-8132-18d473c147b5-host" (OuterVolumeSpecName: "host") pod "ba4d78ed-26ea-4568-8132-18d473c147b5" (UID: "ba4d78ed-26ea-4568-8132-18d473c147b5"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 02 11:37:11 crc kubenswrapper[4838]: I0202 11:37:11.782954 4838 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ba4d78ed-26ea-4568-8132-18d473c147b5-host\") on node \"crc\" DevicePath \"\""
Feb 02 11:37:11 crc kubenswrapper[4838]: I0202 11:37:11.796436 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba4d78ed-26ea-4568-8132-18d473c147b5-kube-api-access-87c6q" (OuterVolumeSpecName: "kube-api-access-87c6q") pod "ba4d78ed-26ea-4568-8132-18d473c147b5" (UID: "ba4d78ed-26ea-4568-8132-18d473c147b5"). InnerVolumeSpecName "kube-api-access-87c6q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:37:11 crc kubenswrapper[4838]: I0202 11:37:11.885362 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-87c6q\" (UniqueName: \"kubernetes.io/projected/ba4d78ed-26ea-4568-8132-18d473c147b5-kube-api-access-87c6q\") on node \"crc\" DevicePath \"\""
Feb 02 11:37:12 crc kubenswrapper[4838]: I0202 11:37:12.534203 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba4d78ed-26ea-4568-8132-18d473c147b5" path="/var/lib/kubelet/pods/ba4d78ed-26ea-4568-8132-18d473c147b5/volumes"
Feb 02 11:37:12 crc kubenswrapper[4838]: I0202 11:37:12.601660 4838 scope.go:117] "RemoveContainer" containerID="eb9ba43f848f5e93a04261f4b7a4926e6b7db3908bea3d9d6bb7b6ae8d9ac645"
Feb 02 11:37:12 crc kubenswrapper[4838]: I0202 11:37:12.601684 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cs9pd/crc-debug-vn4ng"
Feb 02 11:37:23 crc kubenswrapper[4838]: I0202 11:37:23.505553 4838 scope.go:117] "RemoveContainer" containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980"
Feb 02 11:37:23 crc kubenswrapper[4838]: E0202 11:37:23.506432 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce"
Feb 02 11:37:34 crc kubenswrapper[4838]: I0202 11:37:34.506369 4838 scope.go:117] "RemoveContainer" containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980"
Feb 02 11:37:34 crc kubenswrapper[4838]: E0202 11:37:34.507092 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce"
Feb 02 11:37:36 crc kubenswrapper[4838]: I0202 11:37:36.778469 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-859b6c8866-ltwgg_a08effe2-908e-4666-8f0d-2348c91376cf/barbican-api/0.log"
Feb 02 11:37:37 crc kubenswrapper[4838]: I0202 11:37:37.103473 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-859b6c8866-ltwgg_a08effe2-908e-4666-8f0d-2348c91376cf/barbican-api-log/0.log"
Feb 02 11:37:37 crc kubenswrapper[4838]: I0202 11:37:37.177432 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6c7fd57b7d-nvvkt_4001ac84-f8ab-436b-b526-23940d7f0463/barbican-keystone-listener/0.log"
Feb 02 11:37:37 crc kubenswrapper[4838]: I0202 11:37:37.310581 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6c7fd57b7d-nvvkt_4001ac84-f8ab-436b-b526-23940d7f0463/barbican-keystone-listener-log/0.log"
Feb 02 11:37:37 crc kubenswrapper[4838]: I0202 11:37:37.382826 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5545c69c4f-jpg4z_ba0aafb6-d810-4b44-8e8a-eebc89abad94/barbican-worker-log/0.log"
Feb 02 11:37:37 crc kubenswrapper[4838]: I0202 11:37:37.403327 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5545c69c4f-jpg4z_ba0aafb6-d810-4b44-8e8a-eebc89abad94/barbican-worker/0.log"
Feb 02 11:37:37 crc kubenswrapper[4838]: I0202 11:37:37.558017 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_b00c3f84-6034-4c0b-ad6c-52845d6743aa/ceilometer-central-agent/0.log"
Feb 02 11:37:37 crc kubenswrapper[4838]: I0202 11:37:37.598838 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_b00c3f84-6034-4c0b-ad6c-52845d6743aa/ceilometer-notification-agent/0.log"
Feb 02 11:37:37 crc kubenswrapper[4838]: I0202 11:37:37.619292 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_b00c3f84-6034-4c0b-ad6c-52845d6743aa/proxy-httpd/0.log"
Feb 02 11:37:37 crc kubenswrapper[4838]: I0202 11:37:37.760053 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_b00c3f84-6034-4c0b-ad6c-52845d6743aa/sg-core/0.log"
4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_b00c3f84-6034-4c0b-ad6c-52845d6743aa/sg-core/0.log" Feb 02 11:37:37 crc kubenswrapper[4838]: I0202 11:37:37.832928 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_0189f310-62ee-4f4d-b618-5afac393ff30/cinder-api/0.log" Feb 02 11:37:37 crc kubenswrapper[4838]: I0202 11:37:37.840837 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_0189f310-62ee-4f4d-b618-5afac393ff30/cinder-api-log/0.log" Feb 02 11:37:38 crc kubenswrapper[4838]: I0202 11:37:38.016693 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_2c000131-c578-473f-8758-95ae23e12d3a/probe/0.log" Feb 02 11:37:38 crc kubenswrapper[4838]: I0202 11:37:38.175287 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_2c000131-c578-473f-8758-95ae23e12d3a/cinder-scheduler/0.log" Feb 02 11:37:38 crc kubenswrapper[4838]: I0202 11:37:38.184542 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-89c5cd4d5-bjxmv_2a16a8c7-7667-401d-93aa-d0209c7c6ea7/init/0.log" Feb 02 11:37:38 crc kubenswrapper[4838]: I0202 11:37:38.431116 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-89c5cd4d5-bjxmv_2a16a8c7-7667-401d-93aa-d0209c7c6ea7/init/0.log" Feb 02 11:37:38 crc kubenswrapper[4838]: I0202 11:37:38.491897 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-89c5cd4d5-bjxmv_2a16a8c7-7667-401d-93aa-d0209c7c6ea7/dnsmasq-dns/0.log" Feb 02 11:37:38 crc kubenswrapper[4838]: I0202 11:37:38.528001 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_5110c446-0e66-4098-b30a-dfbdbc8e5fbe/glance-httpd/0.log" Feb 02 11:37:38 crc kubenswrapper[4838]: I0202 11:37:38.603825 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_5110c446-0e66-4098-b30a-dfbdbc8e5fbe/glance-log/0.log" Feb 02 11:37:38 crc kubenswrapper[4838]: I0202 11:37:38.689606 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_8824c6f8-8e4e-436f-a4c5-755ac38d0979/glance-httpd/0.log" Feb 02 11:37:38 crc kubenswrapper[4838]: I0202 11:37:38.718349 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_8824c6f8-8e4e-436f-a4c5-755ac38d0979/glance-log/0.log" Feb 02 11:37:38 crc kubenswrapper[4838]: I0202 11:37:38.823278 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-68776656b4-wsgxk_e6281d3a-f605-47fd-a334-f5d814a86d4f/init/0.log" Feb 02 11:37:38 crc kubenswrapper[4838]: I0202 11:37:38.984816 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-68776656b4-wsgxk_e6281d3a-f605-47fd-a334-f5d814a86d4f/ironic-api-log/0.log" Feb 02 11:37:39 crc kubenswrapper[4838]: I0202 11:37:39.012504 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-68776656b4-wsgxk_e6281d3a-f605-47fd-a334-f5d814a86d4f/init/0.log" Feb 02 11:37:39 crc kubenswrapper[4838]: I0202 11:37:39.074539 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-68776656b4-wsgxk_e6281d3a-f605-47fd-a334-f5d814a86d4f/ironic-api/0.log" Feb 02 11:37:39 crc kubenswrapper[4838]: I0202 11:37:39.215904 4838 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/init/0.log" Feb 02 11:37:39 crc kubenswrapper[4838]: I0202 11:37:39.343419 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/init/0.log" Feb 02 11:37:39 crc kubenswrapper[4838]: I0202 11:37:39.361968 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/ironic-python-agent-init/0.log" Feb 02 11:37:39 crc kubenswrapper[4838]: I0202 11:37:39.421286 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/ironic-python-agent-init/0.log" Feb 02 11:37:39 crc kubenswrapper[4838]: I0202 11:37:39.606030 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/init/0.log" Feb 02 11:37:39 crc kubenswrapper[4838]: I0202 11:37:39.651805 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/ironic-python-agent-init/0.log" Feb 02 11:37:40 crc kubenswrapper[4838]: I0202 11:37:40.061259 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/init/0.log" Feb 02 11:37:40 crc kubenswrapper[4838]: I0202 11:37:40.290146 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/ironic-python-agent-init/0.log" Feb 02 11:37:40 crc kubenswrapper[4838]: I0202 11:37:40.796810 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/pxe-init/0.log" Feb 02 11:37:40 crc kubenswrapper[4838]: I0202 11:37:40.872919 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/httpboot/0.log" Feb 02 11:37:41 crc kubenswrapper[4838]: I0202 11:37:41.039010 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/ramdisk-logs/0.log" Feb 02 11:37:41 crc kubenswrapper[4838]: I0202 11:37:41.091314 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/ironic-conductor/0.log" Feb 02 11:37:41 crc kubenswrapper[4838]: I0202 11:37:41.144893 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/pxe-init/0.log" Feb 02 11:37:41 crc kubenswrapper[4838]: I0202 11:37:41.304951 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-db-sync-z8sb2_04bf896a-e964-48a2-900e-44362394a6ac/init/0.log" Feb 02 11:37:41 crc kubenswrapper[4838]: I0202 11:37:41.323950 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/pxe-init/0.log" Feb 02 11:37:41 crc kubenswrapper[4838]: I0202 11:37:41.636054 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_3318b8c1-22ca-45c4-a2fd-90205cea5a72/pxe-init/0.log" Feb 02 11:37:41 crc kubenswrapper[4838]: I0202 11:37:41.662266 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-db-sync-z8sb2_04bf896a-e964-48a2-900e-44362394a6ac/init/0.log" Feb 02 11:37:41 crc kubenswrapper[4838]: I0202 
Feb 02 11:37:41 crc kubenswrapper[4838]: I0202 11:37:41.796459 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_fe57cd8a-a524-426c-a2f4-401cd5642248/ironic-python-agent-init/0.log"
Feb 02 11:37:41 crc kubenswrapper[4838]: I0202 11:37:41.948822 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_fe57cd8a-a524-426c-a2f4-401cd5642248/ironic-python-agent-init/0.log"
Feb 02 11:37:41 crc kubenswrapper[4838]: I0202 11:37:41.978200 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_fe57cd8a-a524-426c-a2f4-401cd5642248/inspector-pxe-init/0.log"
Feb 02 11:37:41 crc kubenswrapper[4838]: I0202 11:37:41.991476 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_fe57cd8a-a524-426c-a2f4-401cd5642248/inspector-pxe-init/0.log"
Feb 02 11:37:42 crc kubenswrapper[4838]: I0202 11:37:42.190465 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_fe57cd8a-a524-426c-a2f4-401cd5642248/ironic-python-agent-init/0.log"
Feb 02 11:37:42 crc kubenswrapper[4838]: I0202 11:37:42.201713 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_fe57cd8a-a524-426c-a2f4-401cd5642248/inspector-httpboot/0.log"
Feb 02 11:37:42 crc kubenswrapper[4838]: I0202 11:37:42.205226 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_fe57cd8a-a524-426c-a2f4-401cd5642248/inspector-pxe-init/0.log"
Feb 02 11:37:42 crc kubenswrapper[4838]: I0202 11:37:42.318205 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_fe57cd8a-a524-426c-a2f4-401cd5642248/ironic-inspector/0.log"
Feb 02 11:37:42 crc kubenswrapper[4838]: I0202 11:37:42.323753 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_fe57cd8a-a524-426c-a2f4-401cd5642248/ironic-inspector-httpd/0.log"
Feb 02 11:37:42 crc kubenswrapper[4838]: I0202 11:37:42.402446 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_fe57cd8a-a524-426c-a2f4-401cd5642248/ramdisk-logs/0.log"
Feb 02 11:37:42 crc kubenswrapper[4838]: I0202 11:37:42.458964 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-db-sync-s47dm_1a115a1d-336b-4c0d-81c4-3ce5c52b05a5/ironic-inspector-db-sync/0.log"
Feb 02 11:37:42 crc kubenswrapper[4838]: I0202 11:37:42.571960 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-neutron-agent-9c5f849b9-h2frc_cb80ba2d-57e2-4a6d-95cc-e67af228cb54/ironic-neutron-agent/0.log"
Feb 02 11:37:42 crc kubenswrapper[4838]: I0202 11:37:42.782277 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_253cbeb6-ec5a-404a-904c-d06b377ed987/kube-state-metrics/0.log"
Feb 02 11:37:42 crc kubenswrapper[4838]: I0202 11:37:42.848658 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-554c88cfc6-svbvn_04f65e34-3c92-4288-86f4-cfc67c46de23/keystone-api/0.log"
Feb 02 11:37:43 crc kubenswrapper[4838]: I0202 11:37:43.091163 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-595df946b7-5b7qm_de9fd7ce-936c-460d-b33b-e9b089a7d495/neutron-httpd/0.log"
Feb 02 11:37:43 crc kubenswrapper[4838]: I0202 11:37:43.325671 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-595df946b7-5b7qm_de9fd7ce-936c-460d-b33b-e9b089a7d495/neutron-api/0.log"
Feb 02 11:37:43 crc kubenswrapper[4838]: I0202 11:37:43.448016 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_b3dc8cbc-2c9c-4192-99e3-7724d3c28c68/nova-api-log/0.log"
Feb 02 11:37:43 crc kubenswrapper[4838]: I0202 11:37:43.628083 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_b3dc8cbc-2c9c-4192-99e3-7724d3c28c68/nova-api-api/0.log"
Feb 02 11:37:43 crc kubenswrapper[4838]: I0202 11:37:43.769420 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_29a3c9fb-a43f-4867-94e9-dd205f0fb517/nova-cell0-conductor-conductor/0.log"
Feb 02 11:37:43 crc kubenswrapper[4838]: I0202 11:37:43.798904 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_966289ba-cb66-4cf4-adff-45ac19b18add/nova-cell1-conductor-conductor/0.log"
Feb 02 11:37:44 crc kubenswrapper[4838]: I0202 11:37:44.018671 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_7e38e111-e96a-4196-84d2-9f6f2cd192dc/nova-cell1-novncproxy-novncproxy/0.log"
Feb 02 11:37:44 crc kubenswrapper[4838]: I0202 11:37:44.283584 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_b6d9b193-d35a-40e7-87da-b20cfaca82b4/nova-metadata-log/0.log"
Feb 02 11:37:44 crc kubenswrapper[4838]: I0202 11:37:44.551804 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_1f864d41-d3ba-4025-9ffc-d60bb52a18b1/nova-scheduler-scheduler/0.log"
Feb 02 11:37:44 crc kubenswrapper[4838]: I0202 11:37:44.583603 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_2f841e0c-b40f-4dd1-8427-ea07840bcdf6/mysql-bootstrap/0.log"
Feb 02 11:37:44 crc kubenswrapper[4838]: I0202 11:37:44.771552 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_2f841e0c-b40f-4dd1-8427-ea07840bcdf6/galera/0.log"
Feb 02 11:37:44 crc kubenswrapper[4838]: I0202 11:37:44.794543 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_2f841e0c-b40f-4dd1-8427-ea07840bcdf6/mysql-bootstrap/0.log"
Feb 02 11:37:44 crc kubenswrapper[4838]: I0202 11:37:44.897761 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_b6d9b193-d35a-40e7-87da-b20cfaca82b4/nova-metadata-metadata/0.log"
Feb 02 11:37:44 crc kubenswrapper[4838]: I0202 11:37:44.951080 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5b42feb4-a718-4036-be9e-3113b97680c4/mysql-bootstrap/0.log"
Feb 02 11:37:45 crc kubenswrapper[4838]: I0202 11:37:45.240128 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5b42feb4-a718-4036-be9e-3113b97680c4/mysql-bootstrap/0.log"
Feb 02 11:37:45 crc kubenswrapper[4838]: I0202 11:37:45.242389 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5b42feb4-a718-4036-be9e-3113b97680c4/galera/0.log"
Feb 02 11:37:45 crc kubenswrapper[4838]: I0202 11:37:45.317178 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_7cb40c61-e9a4-44fb-b15c-fa2ffb3b7406/openstackclient/0.log"
Feb 02 11:37:45 crc kubenswrapper[4838]: I0202 11:37:45.514633 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-78llm_89d377c3-4929-47c4-abc7-53bb5e058025/ovn-controller/0.log"
parsing log file" path="/var/log/pods/openstack_ovn-controller-78llm_89d377c3-4929-47c4-abc7-53bb5e058025/ovn-controller/0.log" Feb 02 11:37:45 crc kubenswrapper[4838]: I0202 11:37:45.533472 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-p2lkw_1ecdb5d4-caae-49d5-80f4-a806c1d4bc0e/openstack-network-exporter/0.log" Feb 02 11:37:45 crc kubenswrapper[4838]: I0202 11:37:45.689558 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-gmk9d_6201c1a8-a058-4029-ac96-17f4500b9fc0/ovsdb-server-init/0.log" Feb 02 11:37:45 crc kubenswrapper[4838]: I0202 11:37:45.956194 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-gmk9d_6201c1a8-a058-4029-ac96-17f4500b9fc0/ovsdb-server/0.log" Feb 02 11:37:45 crc kubenswrapper[4838]: I0202 11:37:45.967277 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-gmk9d_6201c1a8-a058-4029-ac96-17f4500b9fc0/ovs-vswitchd/0.log" Feb 02 11:37:46 crc kubenswrapper[4838]: I0202 11:37:46.060394 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-gmk9d_6201c1a8-a058-4029-ac96-17f4500b9fc0/ovsdb-server-init/0.log" Feb 02 11:37:46 crc kubenswrapper[4838]: I0202 11:37:46.206072 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_6e6a9dbb-63ef-4cf2-b725-254ad752937d/ovn-northd/0.log" Feb 02 11:37:46 crc kubenswrapper[4838]: I0202 11:37:46.220449 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_6e6a9dbb-63ef-4cf2-b725-254ad752937d/openstack-network-exporter/0.log" Feb 02 11:37:46 crc kubenswrapper[4838]: I0202 11:37:46.333167 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_bbb8b7a6-28d0-40fa-bdcb-fe95357c8018/openstack-network-exporter/0.log" Feb 02 11:37:46 crc kubenswrapper[4838]: I0202 11:37:46.448927 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_bbb8b7a6-28d0-40fa-bdcb-fe95357c8018/ovsdbserver-nb/0.log" Feb 02 11:37:46 crc kubenswrapper[4838]: I0202 11:37:46.513371 4838 scope.go:117] "RemoveContainer" containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980" Feb 02 11:37:46 crc kubenswrapper[4838]: I0202 11:37:46.526044 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_a8af70b4-152f-4edb-a4c5-afc8baed3685/openstack-network-exporter/0.log" Feb 02 11:37:46 crc kubenswrapper[4838]: I0202 11:37:46.538382 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_a8af70b4-152f-4edb-a4c5-afc8baed3685/ovsdbserver-sb/0.log" Feb 02 11:37:46 crc kubenswrapper[4838]: I0202 11:37:46.730700 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-b4ffff5fb-46ldv_32a23a44-9dae-46da-9895-dcd512447d9c/placement-api/0.log" Feb 02 11:37:46 crc kubenswrapper[4838]: I0202 11:37:46.825803 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-b4ffff5fb-46ldv_32a23a44-9dae-46da-9895-dcd512447d9c/placement-log/0.log" Feb 02 11:37:46 crc kubenswrapper[4838]: I0202 11:37:46.939819 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_a21f4822-c0c1-4b73-bad3-ddf3552c9ebd/setup-container/0.log" Feb 02 11:37:46 crc kubenswrapper[4838]: I0202 11:37:46.941006 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" event={"ID":"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce","Type":"ContainerStarted","Data":"c9402ad06463da91e1442dcbaf0f24edbf39af95cfe72e53a949140dfa1034a3"} Feb 02 11:37:47 crc kubenswrapper[4838]: I0202 11:37:47.140255 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c6516e19-8887-4dda-a635-bc93da2a19a6/setup-container/0.log" Feb 02 11:37:47 crc kubenswrapper[4838]: I0202 11:37:47.168801 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_a21f4822-c0c1-4b73-bad3-ddf3552c9ebd/setup-container/0.log" Feb 02 11:37:47 crc kubenswrapper[4838]: I0202 11:37:47.305147 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_a21f4822-c0c1-4b73-bad3-ddf3552c9ebd/rabbitmq/0.log" Feb 02 11:37:47 crc kubenswrapper[4838]: I0202 11:37:47.634079 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c6516e19-8887-4dda-a635-bc93da2a19a6/setup-container/0.log" Feb 02 11:37:47 crc kubenswrapper[4838]: I0202 11:37:47.670506 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c6516e19-8887-4dda-a635-bc93da2a19a6/rabbitmq/0.log" Feb 02 11:37:47 crc kubenswrapper[4838]: I0202 11:37:47.852037 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-655dd9ff5-m4vn6_f5af71cb-2380-4977-9a44-ece13d4ce18a/proxy-httpd/0.log" Feb 02 11:37:47 crc kubenswrapper[4838]: I0202 11:37:47.861659 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-655dd9ff5-m4vn6_f5af71cb-2380-4977-9a44-ece13d4ce18a/proxy-server/0.log" Feb 02 11:37:47 crc kubenswrapper[4838]: I0202 11:37:47.963312 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-524zj_582a43e1-d21a-4421-ae28-0eecd147d19e/swift-ring-rebalance/0.log" Feb 02 11:37:48 crc kubenswrapper[4838]: I0202 11:37:48.074121 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/account-auditor/0.log" Feb 02 11:37:48 crc kubenswrapper[4838]: I0202 11:37:48.176454 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/account-replicator/0.log" Feb 02 11:37:48 crc kubenswrapper[4838]: I0202 11:37:48.181125 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/account-reaper/0.log" Feb 02 11:37:48 crc kubenswrapper[4838]: I0202 11:37:48.276853 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/container-auditor/0.log" Feb 02 11:37:48 crc kubenswrapper[4838]: I0202 11:37:48.317231 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/account-server/0.log" Feb 02 11:37:48 crc kubenswrapper[4838]: I0202 11:37:48.409063 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/container-replicator/0.log" Feb 02 11:37:48 crc kubenswrapper[4838]: I0202 11:37:48.436633 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/container-server/0.log" Feb 02 11:37:48 crc kubenswrapper[4838]: I0202 11:37:48.512111 4838 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/container-updater/0.log" Feb 02 11:37:48 crc kubenswrapper[4838]: I0202 11:37:48.548594 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/object-auditor/0.log" Feb 02 11:37:48 crc kubenswrapper[4838]: I0202 11:37:48.601243 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/object-expirer/0.log" Feb 02 11:37:48 crc kubenswrapper[4838]: I0202 11:37:48.695679 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/object-replicator/0.log" Feb 02 11:37:48 crc kubenswrapper[4838]: I0202 11:37:48.732444 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/object-server/0.log" Feb 02 11:37:48 crc kubenswrapper[4838]: I0202 11:37:48.795750 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/object-updater/0.log" Feb 02 11:37:48 crc kubenswrapper[4838]: I0202 11:37:48.796416 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/rsync/0.log" Feb 02 11:37:48 crc kubenswrapper[4838]: I0202 11:37:48.930287 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_12e53950-9847-46b2-a51a-1fac5b690098/swift-recon-cron/0.log" Feb 02 11:37:56 crc kubenswrapper[4838]: I0202 11:37:56.190352 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_605eae6b-dbaf-4781-97bb-2ef09397141d/memcached/0.log" Feb 02 11:38:13 crc kubenswrapper[4838]: I0202 11:38:13.700841 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97_a0ffc923-92b7-4528-963f-bb993ecb20c1/util/0.log" Feb 02 11:38:13 crc kubenswrapper[4838]: I0202 11:38:13.916083 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97_a0ffc923-92b7-4528-963f-bb993ecb20c1/pull/0.log" Feb 02 11:38:13 crc kubenswrapper[4838]: I0202 11:38:13.919680 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97_a0ffc923-92b7-4528-963f-bb993ecb20c1/pull/0.log" Feb 02 11:38:13 crc kubenswrapper[4838]: I0202 11:38:13.958857 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97_a0ffc923-92b7-4528-963f-bb993ecb20c1/util/0.log" Feb 02 11:38:14 crc kubenswrapper[4838]: I0202 11:38:14.140337 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97_a0ffc923-92b7-4528-963f-bb993ecb20c1/util/0.log" Feb 02 11:38:14 crc kubenswrapper[4838]: I0202 11:38:14.165440 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97_a0ffc923-92b7-4528-963f-bb993ecb20c1/pull/0.log" Feb 02 11:38:14 crc kubenswrapper[4838]: I0202 11:38:14.169157 4838 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_07cc310619b9ab4095c762652e78c923e2fbab33543de9c8c9288ed97fhfk97_a0ffc923-92b7-4528-963f-bb993ecb20c1/extract/0.log" Feb 02 11:38:14 crc kubenswrapper[4838]: I0202 11:38:14.361177 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7b6c4d8c5f-db2x5_cb4f687b-4b19-447b-beb4-1646c2a40800/manager/0.log" Feb 02 11:38:14 crc kubenswrapper[4838]: I0202 11:38:14.413611 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-8d874c8fc-hm4jh_13ab41db-f38e-4980-89f9-361236526dfa/manager/0.log" Feb 02 11:38:14 crc kubenswrapper[4838]: I0202 11:38:14.548767 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-6d9697b7f4-rs64q_e358aab4-cbb0-4522-8740-6646b7fdcabd/manager/0.log" Feb 02 11:38:14 crc kubenswrapper[4838]: I0202 11:38:14.624041 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-8886f4c47-xftmv_084b46d2-88a9-42e4-83b2-dbccf264aafe/manager/0.log" Feb 02 11:38:14 crc kubenswrapper[4838]: I0202 11:38:14.747738 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-69d6db494d-qrf72_9cfe65eb-c657-4f96-b48f-1c9831fd75ba/manager/0.log" Feb 02 11:38:14 crc kubenswrapper[4838]: I0202 11:38:14.789908 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-5fb775575f-2kqdq_ccd2dd8e-7b67-4b94-9b9a-b76fab87903c/manager/0.log" Feb 02 11:38:15 crc kubenswrapper[4838]: I0202 11:38:15.069726 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7c6b8858cc-lk5ts_79dd465e-2e36-423e-af5b-f41d715c0297/manager/0.log" Feb 02 11:38:15 crc kubenswrapper[4838]: I0202 11:38:15.263218 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-79955696d6-thn2f_5e0647d6-93ed-40f1-a522-f5ecf769dd14/manager/0.log" Feb 02 11:38:15 crc kubenswrapper[4838]: I0202 11:38:15.285729 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7dd968899f-5s687_75858b44-9a09-43f3-8de5-8ae999ae2657/manager/0.log" Feb 02 11:38:15 crc kubenswrapper[4838]: I0202 11:38:15.332890 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-84f48565d4-tn9zr_4b7d42b0-25f5-40d4-8deb-34841b6c8c92/manager/0.log" Feb 02 11:38:15 crc kubenswrapper[4838]: I0202 11:38:15.449078 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-67bf948998-q6xd2_8291636d-bcda-4171-825a-f0f3c73b1320/manager/0.log" Feb 02 11:38:15 crc kubenswrapper[4838]: I0202 11:38:15.577745 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-585dbc889-52b6h_53860709-50fb-44d9-910b-d4142608d8d8/manager/0.log" Feb 02 11:38:15 crc kubenswrapper[4838]: I0202 11:38:15.713501 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-55bff696bd-pms7g_a17b67e7-df64-4f12-8e78-c52068d2b1df/manager/0.log" Feb 02 11:38:15 crc kubenswrapper[4838]: I0202 11:38:15.788697 4838 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6687f8d877-hp6js_382292e1-fda7-4ab5-91e7-cf4ade4d6363/manager/0.log" Feb 02 11:38:15 crc kubenswrapper[4838]: I0202 11:38:15.862397 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-59c4b45c4dpm9bj_0a8916a2-6c71-4678-9a42-23b82b72f891/manager/0.log" Feb 02 11:38:16 crc kubenswrapper[4838]: I0202 11:38:16.096239 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-init-6d5fdc6cdc-dvqjc_228bb6dc-ac19-4dd3-aaa7-265cc00de1c9/operator/0.log" Feb 02 11:38:16 crc kubenswrapper[4838]: I0202 11:38:16.291294 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-svvg4_11d5435a-6771-4a54-b3f1-1f4f6bd2c123/registry-server/0.log" Feb 02 11:38:16 crc kubenswrapper[4838]: I0202 11:38:16.485659 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-788c46999f-lbjxl_e40ca74c-361a-4102-b7de-35464bb8821b/manager/0.log" Feb 02 11:38:16 crc kubenswrapper[4838]: I0202 11:38:16.553581 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5b964cf4cd-kbj4l_7ebd9e27-5249-4c31-86cd-200ec9c3b852/manager/0.log" Feb 02 11:38:16 crc kubenswrapper[4838]: I0202 11:38:16.888392 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-68fc8c869-8l8mb_d960011d-30b7-4eb4-9e06-1b8b9aa0a114/manager/0.log" Feb 02 11:38:16 crc kubenswrapper[4838]: I0202 11:38:16.951576 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-pljzb_a158eb1e-69b8-48ad-8061-a3e503981572/operator/0.log" Feb 02 11:38:17 crc kubenswrapper[4838]: I0202 11:38:17.305118 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-64b5b76f97-4d7p8_5e7863af-65e8-4d89-a434-fac6c13414cc/manager/0.log" Feb 02 11:38:17 crc kubenswrapper[4838]: I0202 11:38:17.335955 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-56f8bfcd9f-5xdv4_27825541-2816-4017-bba1-0f6f5946bb3c/manager/0.log" Feb 02 11:38:17 crc kubenswrapper[4838]: I0202 11:38:17.508701 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-564965969-h6lq5_149430e7-7b6d-44d0-a474-944271e7bb5e/manager/0.log" Feb 02 11:38:17 crc kubenswrapper[4838]: I0202 11:38:17.511748 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-5dcd749f76-wzqhl_83e1d0f5-af2b-4c12-abbd-712e18108a24/manager/0.log" Feb 02 11:38:35 crc kubenswrapper[4838]: I0202 11:38:35.694567 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-5nmhg_eb0d3aa3-09b5-4b68-833d-03218e1794f0/control-plane-machine-set-operator/0.log" Feb 02 11:38:36 crc kubenswrapper[4838]: I0202 11:38:35.999782 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-v475b_b49e2d2f-5155-49bf-82f3-b68992ebe787/machine-api-operator/0.log" Feb 02 11:38:36 crc kubenswrapper[4838]: I0202 11:38:36.169994 4838 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-v475b_b49e2d2f-5155-49bf-82f3-b68992ebe787/kube-rbac-proxy/0.log" Feb 02 11:38:48 crc kubenswrapper[4838]: I0202 11:38:48.764572 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-t66nb"] Feb 02 11:38:48 crc kubenswrapper[4838]: E0202 11:38:48.766370 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba4d78ed-26ea-4568-8132-18d473c147b5" containerName="container-00" Feb 02 11:38:48 crc kubenswrapper[4838]: I0202 11:38:48.766445 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba4d78ed-26ea-4568-8132-18d473c147b5" containerName="container-00" Feb 02 11:38:48 crc kubenswrapper[4838]: I0202 11:38:48.766712 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba4d78ed-26ea-4568-8132-18d473c147b5" containerName="container-00" Feb 02 11:38:48 crc kubenswrapper[4838]: I0202 11:38:48.768075 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t66nb" Feb 02 11:38:48 crc kubenswrapper[4838]: I0202 11:38:48.794984 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-t66nb"] Feb 02 11:38:48 crc kubenswrapper[4838]: I0202 11:38:48.807730 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a19e1d99-5603-4981-9853-785d48c37edd-catalog-content\") pod \"redhat-marketplace-t66nb\" (UID: \"a19e1d99-5603-4981-9853-785d48c37edd\") " pod="openshift-marketplace/redhat-marketplace-t66nb" Feb 02 11:38:48 crc kubenswrapper[4838]: I0202 11:38:48.807782 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a19e1d99-5603-4981-9853-785d48c37edd-utilities\") pod \"redhat-marketplace-t66nb\" (UID: \"a19e1d99-5603-4981-9853-785d48c37edd\") " pod="openshift-marketplace/redhat-marketplace-t66nb" Feb 02 11:38:48 crc kubenswrapper[4838]: I0202 11:38:48.807878 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbvs8\" (UniqueName: \"kubernetes.io/projected/a19e1d99-5603-4981-9853-785d48c37edd-kube-api-access-rbvs8\") pod \"redhat-marketplace-t66nb\" (UID: \"a19e1d99-5603-4981-9853-785d48c37edd\") " pod="openshift-marketplace/redhat-marketplace-t66nb" Feb 02 11:38:48 crc kubenswrapper[4838]: I0202 11:38:48.909765 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a19e1d99-5603-4981-9853-785d48c37edd-catalog-content\") pod \"redhat-marketplace-t66nb\" (UID: \"a19e1d99-5603-4981-9853-785d48c37edd\") " pod="openshift-marketplace/redhat-marketplace-t66nb" Feb 02 11:38:48 crc kubenswrapper[4838]: I0202 11:38:48.909809 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a19e1d99-5603-4981-9853-785d48c37edd-utilities\") pod \"redhat-marketplace-t66nb\" (UID: \"a19e1d99-5603-4981-9853-785d48c37edd\") " pod="openshift-marketplace/redhat-marketplace-t66nb" Feb 02 11:38:48 crc kubenswrapper[4838]: I0202 11:38:48.909836 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbvs8\" (UniqueName: \"kubernetes.io/projected/a19e1d99-5603-4981-9853-785d48c37edd-kube-api-access-rbvs8\") 
pod \"redhat-marketplace-t66nb\" (UID: \"a19e1d99-5603-4981-9853-785d48c37edd\") " pod="openshift-marketplace/redhat-marketplace-t66nb" Feb 02 11:38:48 crc kubenswrapper[4838]: I0202 11:38:48.910706 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a19e1d99-5603-4981-9853-785d48c37edd-catalog-content\") pod \"redhat-marketplace-t66nb\" (UID: \"a19e1d99-5603-4981-9853-785d48c37edd\") " pod="openshift-marketplace/redhat-marketplace-t66nb" Feb 02 11:38:48 crc kubenswrapper[4838]: I0202 11:38:48.910885 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a19e1d99-5603-4981-9853-785d48c37edd-utilities\") pod \"redhat-marketplace-t66nb\" (UID: \"a19e1d99-5603-4981-9853-785d48c37edd\") " pod="openshift-marketplace/redhat-marketplace-t66nb" Feb 02 11:38:48 crc kubenswrapper[4838]: I0202 11:38:48.928563 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbvs8\" (UniqueName: \"kubernetes.io/projected/a19e1d99-5603-4981-9853-785d48c37edd-kube-api-access-rbvs8\") pod \"redhat-marketplace-t66nb\" (UID: \"a19e1d99-5603-4981-9853-785d48c37edd\") " pod="openshift-marketplace/redhat-marketplace-t66nb" Feb 02 11:38:49 crc kubenswrapper[4838]: I0202 11:38:49.125785 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t66nb" Feb 02 11:38:49 crc kubenswrapper[4838]: I0202 11:38:49.409340 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-858654f9db-cl9xl_b7b5f720-0add-47c9-890a-4ca936379c93/cert-manager-controller/0.log" Feb 02 11:38:49 crc kubenswrapper[4838]: I0202 11:38:49.608970 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-t66nb"] Feb 02 11:38:49 crc kubenswrapper[4838]: I0202 11:38:49.649339 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-cf98fcc89-t4jjz_5206c985-0926-498a-b2ae-3be3a5034206/cert-manager-cainjector/0.log" Feb 02 11:38:49 crc kubenswrapper[4838]: I0202 11:38:49.792990 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-687f57d79b-tvmg6_59de6fa5-ca64-482d-81af-d1bfd5e7cba4/cert-manager-webhook/0.log" Feb 02 11:38:50 crc kubenswrapper[4838]: I0202 11:38:50.461837 4838 generic.go:334] "Generic (PLEG): container finished" podID="a19e1d99-5603-4981-9853-785d48c37edd" containerID="5ba185407ec684f9f210fb813d6d5db13622cb30e9beada54669157cc622527a" exitCode=0 Feb 02 11:38:50 crc kubenswrapper[4838]: I0202 11:38:50.461925 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t66nb" event={"ID":"a19e1d99-5603-4981-9853-785d48c37edd","Type":"ContainerDied","Data":"5ba185407ec684f9f210fb813d6d5db13622cb30e9beada54669157cc622527a"} Feb 02 11:38:50 crc kubenswrapper[4838]: I0202 11:38:50.462108 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t66nb" event={"ID":"a19e1d99-5603-4981-9853-785d48c37edd","Type":"ContainerStarted","Data":"7a4cfddcc62623b0a65ff3d519be53c9e25cdfdc751567407a029ebb39dc6922"} Feb 02 11:38:50 crc kubenswrapper[4838]: I0202 11:38:50.464064 4838 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 11:38:51 crc kubenswrapper[4838]: I0202 11:38:51.470018 4838 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t66nb" event={"ID":"a19e1d99-5603-4981-9853-785d48c37edd","Type":"ContainerStarted","Data":"f587faf5da7d11f4b341a6cec631f4916a9c86e658d86f707e1212af84058578"} Feb 02 11:38:51 crc kubenswrapper[4838]: I0202 11:38:51.560323 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-dlzfd"] Feb 02 11:38:51 crc kubenswrapper[4838]: I0202 11:38:51.563840 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dlzfd" Feb 02 11:38:51 crc kubenswrapper[4838]: I0202 11:38:51.582772 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dlzfd"] Feb 02 11:38:51 crc kubenswrapper[4838]: I0202 11:38:51.667212 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed8ba392-0709-4915-97e0-df8de5cdb41b-utilities\") pod \"redhat-operators-dlzfd\" (UID: \"ed8ba392-0709-4915-97e0-df8de5cdb41b\") " pod="openshift-marketplace/redhat-operators-dlzfd" Feb 02 11:38:51 crc kubenswrapper[4838]: I0202 11:38:51.667269 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svrqg\" (UniqueName: \"kubernetes.io/projected/ed8ba392-0709-4915-97e0-df8de5cdb41b-kube-api-access-svrqg\") pod \"redhat-operators-dlzfd\" (UID: \"ed8ba392-0709-4915-97e0-df8de5cdb41b\") " pod="openshift-marketplace/redhat-operators-dlzfd" Feb 02 11:38:51 crc kubenswrapper[4838]: I0202 11:38:51.668577 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed8ba392-0709-4915-97e0-df8de5cdb41b-catalog-content\") pod \"redhat-operators-dlzfd\" (UID: \"ed8ba392-0709-4915-97e0-df8de5cdb41b\") " pod="openshift-marketplace/redhat-operators-dlzfd" Feb 02 11:38:51 crc kubenswrapper[4838]: I0202 11:38:51.770582 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed8ba392-0709-4915-97e0-df8de5cdb41b-utilities\") pod \"redhat-operators-dlzfd\" (UID: \"ed8ba392-0709-4915-97e0-df8de5cdb41b\") " pod="openshift-marketplace/redhat-operators-dlzfd" Feb 02 11:38:51 crc kubenswrapper[4838]: I0202 11:38:51.770976 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svrqg\" (UniqueName: \"kubernetes.io/projected/ed8ba392-0709-4915-97e0-df8de5cdb41b-kube-api-access-svrqg\") pod \"redhat-operators-dlzfd\" (UID: \"ed8ba392-0709-4915-97e0-df8de5cdb41b\") " pod="openshift-marketplace/redhat-operators-dlzfd" Feb 02 11:38:51 crc kubenswrapper[4838]: I0202 11:38:51.771119 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed8ba392-0709-4915-97e0-df8de5cdb41b-utilities\") pod \"redhat-operators-dlzfd\" (UID: \"ed8ba392-0709-4915-97e0-df8de5cdb41b\") " pod="openshift-marketplace/redhat-operators-dlzfd" Feb 02 11:38:51 crc kubenswrapper[4838]: I0202 11:38:51.771441 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed8ba392-0709-4915-97e0-df8de5cdb41b-catalog-content\") pod \"redhat-operators-dlzfd\" (UID: \"ed8ba392-0709-4915-97e0-df8de5cdb41b\") " pod="openshift-marketplace/redhat-operators-dlzfd" Feb 02 11:38:51 crc 
kubenswrapper[4838]: I0202 11:38:51.771841 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed8ba392-0709-4915-97e0-df8de5cdb41b-catalog-content\") pod \"redhat-operators-dlzfd\" (UID: \"ed8ba392-0709-4915-97e0-df8de5cdb41b\") " pod="openshift-marketplace/redhat-operators-dlzfd" Feb 02 11:38:51 crc kubenswrapper[4838]: I0202 11:38:51.797659 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svrqg\" (UniqueName: \"kubernetes.io/projected/ed8ba392-0709-4915-97e0-df8de5cdb41b-kube-api-access-svrqg\") pod \"redhat-operators-dlzfd\" (UID: \"ed8ba392-0709-4915-97e0-df8de5cdb41b\") " pod="openshift-marketplace/redhat-operators-dlzfd" Feb 02 11:38:51 crc kubenswrapper[4838]: I0202 11:38:51.924177 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dlzfd" Feb 02 11:38:52 crc kubenswrapper[4838]: I0202 11:38:52.463375 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dlzfd"] Feb 02 11:38:52 crc kubenswrapper[4838]: I0202 11:38:52.480135 4838 generic.go:334] "Generic (PLEG): container finished" podID="a19e1d99-5603-4981-9853-785d48c37edd" containerID="f587faf5da7d11f4b341a6cec631f4916a9c86e658d86f707e1212af84058578" exitCode=0 Feb 02 11:38:52 crc kubenswrapper[4838]: I0202 11:38:52.480186 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t66nb" event={"ID":"a19e1d99-5603-4981-9853-785d48c37edd","Type":"ContainerDied","Data":"f587faf5da7d11f4b341a6cec631f4916a9c86e658d86f707e1212af84058578"} Feb 02 11:38:52 crc kubenswrapper[4838]: I0202 11:38:52.481530 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dlzfd" event={"ID":"ed8ba392-0709-4915-97e0-df8de5cdb41b","Type":"ContainerStarted","Data":"06939725247fd537606d3a55ca595ecd941a22b635b136a17ac76ca57bb1e7ed"} Feb 02 11:38:53 crc kubenswrapper[4838]: I0202 11:38:53.490937 4838 generic.go:334] "Generic (PLEG): container finished" podID="ed8ba392-0709-4915-97e0-df8de5cdb41b" containerID="9da046f20367b0c2fcf584373524d4c5b615e92f2a239bdb4fa66a42b475b426" exitCode=0 Feb 02 11:38:53 crc kubenswrapper[4838]: I0202 11:38:53.491039 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dlzfd" event={"ID":"ed8ba392-0709-4915-97e0-df8de5cdb41b","Type":"ContainerDied","Data":"9da046f20367b0c2fcf584373524d4c5b615e92f2a239bdb4fa66a42b475b426"} Feb 02 11:38:53 crc kubenswrapper[4838]: I0202 11:38:53.494469 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t66nb" event={"ID":"a19e1d99-5603-4981-9853-785d48c37edd","Type":"ContainerStarted","Data":"9601723369ccaa0dfc40565cef81a74e2165774f0e024e37afcc4c7f9d71ba4e"} Feb 02 11:38:53 crc kubenswrapper[4838]: I0202 11:38:53.530224 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-t66nb" podStartSLOduration=3.087688735 podStartE2EDuration="5.530205624s" podCreationTimestamp="2026-02-02 11:38:48 +0000 UTC" firstStartedPulling="2026-02-02 11:38:50.463794352 +0000 UTC m=+2724.800895380" lastFinishedPulling="2026-02-02 11:38:52.906311241 +0000 UTC m=+2727.243412269" observedRunningTime="2026-02-02 11:38:53.528149459 +0000 UTC m=+2727.865250507" watchObservedRunningTime="2026-02-02 11:38:53.530205624 +0000 UTC m=+2727.867306652" Feb 02 
11:38:55 crc kubenswrapper[4838]: I0202 11:38:55.518057 4838 generic.go:334] "Generic (PLEG): container finished" podID="ed8ba392-0709-4915-97e0-df8de5cdb41b" containerID="b6a86b04fffc65175845fb0efe66aa0171de6f2031cf3d0cbaaca9bf27b32115" exitCode=0 Feb 02 11:38:55 crc kubenswrapper[4838]: I0202 11:38:55.518110 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dlzfd" event={"ID":"ed8ba392-0709-4915-97e0-df8de5cdb41b","Type":"ContainerDied","Data":"b6a86b04fffc65175845fb0efe66aa0171de6f2031cf3d0cbaaca9bf27b32115"} Feb 02 11:38:56 crc kubenswrapper[4838]: I0202 11:38:56.528475 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dlzfd" event={"ID":"ed8ba392-0709-4915-97e0-df8de5cdb41b","Type":"ContainerStarted","Data":"a4898b120ac460235d74da9a99d099e31a068ea177b5524c69315c7af41395b4"} Feb 02 11:38:56 crc kubenswrapper[4838]: I0202 11:38:56.550427 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-dlzfd" podStartSLOduration=3.115789472 podStartE2EDuration="5.550406747s" podCreationTimestamp="2026-02-02 11:38:51 +0000 UTC" firstStartedPulling="2026-02-02 11:38:53.493824851 +0000 UTC m=+2727.830925879" lastFinishedPulling="2026-02-02 11:38:55.928442106 +0000 UTC m=+2730.265543154" observedRunningTime="2026-02-02 11:38:56.543403948 +0000 UTC m=+2730.880504996" watchObservedRunningTime="2026-02-02 11:38:56.550406747 +0000 UTC m=+2730.887507775" Feb 02 11:38:59 crc kubenswrapper[4838]: I0202 11:38:59.126721 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-t66nb" Feb 02 11:38:59 crc kubenswrapper[4838]: I0202 11:38:59.127095 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-t66nb" Feb 02 11:38:59 crc kubenswrapper[4838]: I0202 11:38:59.177477 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-t66nb" Feb 02 11:38:59 crc kubenswrapper[4838]: I0202 11:38:59.604886 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-t66nb" Feb 02 11:39:00 crc kubenswrapper[4838]: I0202 11:39:00.359990 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-t66nb"] Feb 02 11:39:01 crc kubenswrapper[4838]: I0202 11:39:01.563315 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-t66nb" podUID="a19e1d99-5603-4981-9853-785d48c37edd" containerName="registry-server" containerID="cri-o://9601723369ccaa0dfc40565cef81a74e2165774f0e024e37afcc4c7f9d71ba4e" gracePeriod=2 Feb 02 11:39:01 crc kubenswrapper[4838]: I0202 11:39:01.925250 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-dlzfd" Feb 02 11:39:01 crc kubenswrapper[4838]: I0202 11:39:01.925410 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-dlzfd" Feb 02 11:39:01 crc kubenswrapper[4838]: I0202 11:39:01.982045 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-dlzfd" Feb 02 11:39:02 crc kubenswrapper[4838]: I0202 11:39:02.578181 4838 generic.go:334] "Generic (PLEG): container finished" podID="a19e1d99-5603-4981-9853-785d48c37edd" 
containerID="9601723369ccaa0dfc40565cef81a74e2165774f0e024e37afcc4c7f9d71ba4e" exitCode=0 Feb 02 11:39:02 crc kubenswrapper[4838]: I0202 11:39:02.579836 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t66nb" event={"ID":"a19e1d99-5603-4981-9853-785d48c37edd","Type":"ContainerDied","Data":"9601723369ccaa0dfc40565cef81a74e2165774f0e024e37afcc4c7f9d71ba4e"} Feb 02 11:39:02 crc kubenswrapper[4838]: I0202 11:39:02.666193 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-dlzfd" Feb 02 11:39:02 crc kubenswrapper[4838]: I0202 11:39:02.886850 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t66nb" Feb 02 11:39:02 crc kubenswrapper[4838]: I0202 11:39:02.992706 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rbvs8\" (UniqueName: \"kubernetes.io/projected/a19e1d99-5603-4981-9853-785d48c37edd-kube-api-access-rbvs8\") pod \"a19e1d99-5603-4981-9853-785d48c37edd\" (UID: \"a19e1d99-5603-4981-9853-785d48c37edd\") " Feb 02 11:39:02 crc kubenswrapper[4838]: I0202 11:39:02.994258 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a19e1d99-5603-4981-9853-785d48c37edd-catalog-content\") pod \"a19e1d99-5603-4981-9853-785d48c37edd\" (UID: \"a19e1d99-5603-4981-9853-785d48c37edd\") " Feb 02 11:39:02 crc kubenswrapper[4838]: I0202 11:39:02.994447 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a19e1d99-5603-4981-9853-785d48c37edd-utilities\") pod \"a19e1d99-5603-4981-9853-785d48c37edd\" (UID: \"a19e1d99-5603-4981-9853-785d48c37edd\") " Feb 02 11:39:02 crc kubenswrapper[4838]: I0202 11:39:02.995209 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a19e1d99-5603-4981-9853-785d48c37edd-utilities" (OuterVolumeSpecName: "utilities") pod "a19e1d99-5603-4981-9853-785d48c37edd" (UID: "a19e1d99-5603-4981-9853-785d48c37edd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:39:02 crc kubenswrapper[4838]: I0202 11:39:02.995488 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a19e1d99-5603-4981-9853-785d48c37edd-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 11:39:02 crc kubenswrapper[4838]: I0202 11:39:02.999645 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a19e1d99-5603-4981-9853-785d48c37edd-kube-api-access-rbvs8" (OuterVolumeSpecName: "kube-api-access-rbvs8") pod "a19e1d99-5603-4981-9853-785d48c37edd" (UID: "a19e1d99-5603-4981-9853-785d48c37edd"). InnerVolumeSpecName "kube-api-access-rbvs8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:39:03 crc kubenswrapper[4838]: I0202 11:39:03.023648 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a19e1d99-5603-4981-9853-785d48c37edd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a19e1d99-5603-4981-9853-785d48c37edd" (UID: "a19e1d99-5603-4981-9853-785d48c37edd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:39:03 crc kubenswrapper[4838]: I0202 11:39:03.096419 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rbvs8\" (UniqueName: \"kubernetes.io/projected/a19e1d99-5603-4981-9853-785d48c37edd-kube-api-access-rbvs8\") on node \"crc\" DevicePath \"\"" Feb 02 11:39:03 crc kubenswrapper[4838]: I0202 11:39:03.096455 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a19e1d99-5603-4981-9853-785d48c37edd-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 11:39:03 crc kubenswrapper[4838]: I0202 11:39:03.591874 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t66nb" Feb 02 11:39:03 crc kubenswrapper[4838]: I0202 11:39:03.592761 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t66nb" event={"ID":"a19e1d99-5603-4981-9853-785d48c37edd","Type":"ContainerDied","Data":"7a4cfddcc62623b0a65ff3d519be53c9e25cdfdc751567407a029ebb39dc6922"} Feb 02 11:39:03 crc kubenswrapper[4838]: I0202 11:39:03.592836 4838 scope.go:117] "RemoveContainer" containerID="9601723369ccaa0dfc40565cef81a74e2165774f0e024e37afcc4c7f9d71ba4e" Feb 02 11:39:03 crc kubenswrapper[4838]: I0202 11:39:03.619914 4838 scope.go:117] "RemoveContainer" containerID="f587faf5da7d11f4b341a6cec631f4916a9c86e658d86f707e1212af84058578" Feb 02 11:39:03 crc kubenswrapper[4838]: I0202 11:39:03.624752 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-t66nb"] Feb 02 11:39:03 crc kubenswrapper[4838]: I0202 11:39:03.636875 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-t66nb"] Feb 02 11:39:03 crc kubenswrapper[4838]: I0202 11:39:03.644675 4838 scope.go:117] "RemoveContainer" containerID="5ba185407ec684f9f210fb813d6d5db13622cb30e9beada54669157cc622527a" Feb 02 11:39:03 crc kubenswrapper[4838]: I0202 11:39:03.717384 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-q98cv_4cbd203f-2073-4bee-8234-da99cf46562b/nmstate-console-plugin/0.log" Feb 02 11:39:03 crc kubenswrapper[4838]: I0202 11:39:03.874297 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-ch2vl_539fff34-8b11-42d9-b32f-4c1cab281cf5/nmstate-handler/0.log" Feb 02 11:39:03 crc kubenswrapper[4838]: I0202 11:39:03.905434 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-q9xb2_c3132ec2-5218-4b6f-8e19-dfce93103b19/kube-rbac-proxy/0.log" Feb 02 11:39:03 crc kubenswrapper[4838]: I0202 11:39:03.945031 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-q9xb2_c3132ec2-5218-4b6f-8e19-dfce93103b19/nmstate-metrics/0.log" Feb 02 11:39:04 crc kubenswrapper[4838]: I0202 11:39:04.065771 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-bggr6_eac05e66-32ef-40ab-833b-ffdb87e12159/nmstate-operator/0.log" Feb 02 11:39:04 crc kubenswrapper[4838]: I0202 11:39:04.119786 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-nwk6k_b94f73b7-7c9a-4c88-9180-76861894189e/nmstate-webhook/0.log" Feb 02 11:39:04 crc kubenswrapper[4838]: I0202 11:39:04.517509 4838 kubelet_volumes.go:163] "Cleaned up orphaned 
pod volumes dir" podUID="a19e1d99-5603-4981-9853-785d48c37edd" path="/var/lib/kubelet/pods/a19e1d99-5603-4981-9853-785d48c37edd/volumes" Feb 02 11:39:04 crc kubenswrapper[4838]: I0202 11:39:04.560181 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dlzfd"] Feb 02 11:39:05 crc kubenswrapper[4838]: I0202 11:39:05.607370 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-dlzfd" podUID="ed8ba392-0709-4915-97e0-df8de5cdb41b" containerName="registry-server" containerID="cri-o://a4898b120ac460235d74da9a99d099e31a068ea177b5524c69315c7af41395b4" gracePeriod=2 Feb 02 11:39:06 crc kubenswrapper[4838]: I0202 11:39:06.624035 4838 generic.go:334] "Generic (PLEG): container finished" podID="ed8ba392-0709-4915-97e0-df8de5cdb41b" containerID="a4898b120ac460235d74da9a99d099e31a068ea177b5524c69315c7af41395b4" exitCode=0 Feb 02 11:39:06 crc kubenswrapper[4838]: I0202 11:39:06.624290 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dlzfd" event={"ID":"ed8ba392-0709-4915-97e0-df8de5cdb41b","Type":"ContainerDied","Data":"a4898b120ac460235d74da9a99d099e31a068ea177b5524c69315c7af41395b4"} Feb 02 11:39:06 crc kubenswrapper[4838]: I0202 11:39:06.733587 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dlzfd" Feb 02 11:39:06 crc kubenswrapper[4838]: I0202 11:39:06.781429 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed8ba392-0709-4915-97e0-df8de5cdb41b-utilities\") pod \"ed8ba392-0709-4915-97e0-df8de5cdb41b\" (UID: \"ed8ba392-0709-4915-97e0-df8de5cdb41b\") " Feb 02 11:39:06 crc kubenswrapper[4838]: I0202 11:39:06.781484 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed8ba392-0709-4915-97e0-df8de5cdb41b-catalog-content\") pod \"ed8ba392-0709-4915-97e0-df8de5cdb41b\" (UID: \"ed8ba392-0709-4915-97e0-df8de5cdb41b\") " Feb 02 11:39:06 crc kubenswrapper[4838]: I0202 11:39:06.781527 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svrqg\" (UniqueName: \"kubernetes.io/projected/ed8ba392-0709-4915-97e0-df8de5cdb41b-kube-api-access-svrqg\") pod \"ed8ba392-0709-4915-97e0-df8de5cdb41b\" (UID: \"ed8ba392-0709-4915-97e0-df8de5cdb41b\") " Feb 02 11:39:06 crc kubenswrapper[4838]: I0202 11:39:06.782548 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed8ba392-0709-4915-97e0-df8de5cdb41b-utilities" (OuterVolumeSpecName: "utilities") pod "ed8ba392-0709-4915-97e0-df8de5cdb41b" (UID: "ed8ba392-0709-4915-97e0-df8de5cdb41b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:39:06 crc kubenswrapper[4838]: I0202 11:39:06.786529 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed8ba392-0709-4915-97e0-df8de5cdb41b-kube-api-access-svrqg" (OuterVolumeSpecName: "kube-api-access-svrqg") pod "ed8ba392-0709-4915-97e0-df8de5cdb41b" (UID: "ed8ba392-0709-4915-97e0-df8de5cdb41b"). InnerVolumeSpecName "kube-api-access-svrqg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:39:06 crc kubenswrapper[4838]: I0202 11:39:06.883859 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed8ba392-0709-4915-97e0-df8de5cdb41b-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 11:39:06 crc kubenswrapper[4838]: I0202 11:39:06.883916 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svrqg\" (UniqueName: \"kubernetes.io/projected/ed8ba392-0709-4915-97e0-df8de5cdb41b-kube-api-access-svrqg\") on node \"crc\" DevicePath \"\"" Feb 02 11:39:06 crc kubenswrapper[4838]: I0202 11:39:06.908308 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed8ba392-0709-4915-97e0-df8de5cdb41b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ed8ba392-0709-4915-97e0-df8de5cdb41b" (UID: "ed8ba392-0709-4915-97e0-df8de5cdb41b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 11:39:06 crc kubenswrapper[4838]: I0202 11:39:06.986063 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed8ba392-0709-4915-97e0-df8de5cdb41b-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 11:39:07 crc kubenswrapper[4838]: I0202 11:39:07.634906 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dlzfd" event={"ID":"ed8ba392-0709-4915-97e0-df8de5cdb41b","Type":"ContainerDied","Data":"06939725247fd537606d3a55ca595ecd941a22b635b136a17ac76ca57bb1e7ed"} Feb 02 11:39:07 crc kubenswrapper[4838]: I0202 11:39:07.635437 4838 scope.go:117] "RemoveContainer" containerID="a4898b120ac460235d74da9a99d099e31a068ea177b5524c69315c7af41395b4" Feb 02 11:39:07 crc kubenswrapper[4838]: I0202 11:39:07.634963 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dlzfd" Feb 02 11:39:07 crc kubenswrapper[4838]: I0202 11:39:07.669848 4838 scope.go:117] "RemoveContainer" containerID="b6a86b04fffc65175845fb0efe66aa0171de6f2031cf3d0cbaaca9bf27b32115" Feb 02 11:39:07 crc kubenswrapper[4838]: I0202 11:39:07.675385 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dlzfd"] Feb 02 11:39:07 crc kubenswrapper[4838]: I0202 11:39:07.691284 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-dlzfd"] Feb 02 11:39:07 crc kubenswrapper[4838]: I0202 11:39:07.693140 4838 scope.go:117] "RemoveContainer" containerID="9da046f20367b0c2fcf584373524d4c5b615e92f2a239bdb4fa66a42b475b426" Feb 02 11:39:08 crc kubenswrapper[4838]: I0202 11:39:08.517008 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed8ba392-0709-4915-97e0-df8de5cdb41b" path="/var/lib/kubelet/pods/ed8ba392-0709-4915-97e0-df8de5cdb41b/volumes" Feb 02 11:39:30 crc kubenswrapper[4838]: I0202 11:39:30.236843 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-7kdtd_b92f99a9-4bbd-4f01-9b6e-8cb3e0fab6b4/kube-rbac-proxy/0.log" Feb 02 11:39:30 crc kubenswrapper[4838]: I0202 11:39:30.391770 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-7kdtd_b92f99a9-4bbd-4f01-9b6e-8cb3e0fab6b4/controller/0.log" Feb 02 11:39:30 crc kubenswrapper[4838]: I0202 11:39:30.467558 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/cp-frr-files/0.log" Feb 02 11:39:30 crc kubenswrapper[4838]: I0202 11:39:30.656913 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/cp-frr-files/0.log" Feb 02 11:39:30 crc kubenswrapper[4838]: I0202 11:39:30.684269 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/cp-reloader/0.log" Feb 02 11:39:30 crc kubenswrapper[4838]: I0202 11:39:30.688287 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/cp-metrics/0.log" Feb 02 11:39:30 crc kubenswrapper[4838]: I0202 11:39:30.748550 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/cp-reloader/0.log" Feb 02 11:39:30 crc kubenswrapper[4838]: I0202 11:39:30.904105 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/cp-metrics/0.log" Feb 02 11:39:30 crc kubenswrapper[4838]: I0202 11:39:30.907095 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/cp-frr-files/0.log" Feb 02 11:39:30 crc kubenswrapper[4838]: I0202 11:39:30.935312 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/cp-reloader/0.log" Feb 02 11:39:30 crc kubenswrapper[4838]: I0202 11:39:30.942572 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/cp-metrics/0.log" Feb 02 11:39:31 crc kubenswrapper[4838]: I0202 11:39:31.105467 4838 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/cp-metrics/0.log" Feb 02 11:39:31 crc kubenswrapper[4838]: I0202 11:39:31.106644 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/cp-reloader/0.log" Feb 02 11:39:31 crc kubenswrapper[4838]: I0202 11:39:31.130172 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/cp-frr-files/0.log" Feb 02 11:39:31 crc kubenswrapper[4838]: I0202 11:39:31.147761 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/controller/0.log" Feb 02 11:39:31 crc kubenswrapper[4838]: I0202 11:39:31.293820 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/kube-rbac-proxy/0.log" Feb 02 11:39:31 crc kubenswrapper[4838]: I0202 11:39:31.328012 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/kube-rbac-proxy-frr/0.log" Feb 02 11:39:31 crc kubenswrapper[4838]: I0202 11:39:31.339545 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/frr-metrics/0.log" Feb 02 11:39:31 crc kubenswrapper[4838]: I0202 11:39:31.538653 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/reloader/0.log" Feb 02 11:39:31 crc kubenswrapper[4838]: I0202 11:39:31.543675 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-czj6m_80562d51-943d-4213-abbd-099b4e891ce9/frr-k8s-webhook-server/0.log" Feb 02 11:39:31 crc kubenswrapper[4838]: I0202 11:39:31.751415 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-67dbdd759f-klfvs_734aaa76-0e63-4bf4-9b2d-60a0346dfcac/manager/0.log" Feb 02 11:39:31 crc kubenswrapper[4838]: I0202 11:39:31.904365 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-b56888666-82h5d_ffd0594c-3abc-4b1a-89e4-0face9bad35f/webhook-server/0.log" Feb 02 11:39:32 crc kubenswrapper[4838]: I0202 11:39:32.078269 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-pf2fp_306c1a4f-3d28-4cc9-91bd-a78c25803845/kube-rbac-proxy/0.log" Feb 02 11:39:32 crc kubenswrapper[4838]: I0202 11:39:32.556378 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ltddp_b7904c65-d8f0-4fd2-bcf9-f69d1058c6b6/frr/0.log" Feb 02 11:39:32 crc kubenswrapper[4838]: I0202 11:39:32.571343 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-pf2fp_306c1a4f-3d28-4cc9-91bd-a78c25803845/speaker/0.log" Feb 02 11:39:45 crc kubenswrapper[4838]: I0202 11:39:45.029358 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g_5d5d76da-96c7-47aa-aeb7-e176ab3b89d3/util/0.log" Feb 02 11:39:45 crc kubenswrapper[4838]: I0202 11:39:45.291142 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g_5d5d76da-96c7-47aa-aeb7-e176ab3b89d3/pull/0.log" Feb 02 11:39:45 crc kubenswrapper[4838]: I0202 
11:39:45.304133 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g_5d5d76da-96c7-47aa-aeb7-e176ab3b89d3/util/0.log"
Feb 02 11:39:45 crc kubenswrapper[4838]: I0202 11:39:45.335248 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g_5d5d76da-96c7-47aa-aeb7-e176ab3b89d3/pull/0.log"
Feb 02 11:39:45 crc kubenswrapper[4838]: I0202 11:39:45.490916 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g_5d5d76da-96c7-47aa-aeb7-e176ab3b89d3/util/0.log"
Feb 02 11:39:45 crc kubenswrapper[4838]: I0202 11:39:45.493858 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g_5d5d76da-96c7-47aa-aeb7-e176ab3b89d3/pull/0.log"
Feb 02 11:39:45 crc kubenswrapper[4838]: I0202 11:39:45.496826 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcw7v7g_5d5d76da-96c7-47aa-aeb7-e176ab3b89d3/extract/0.log"
Feb 02 11:39:45 crc kubenswrapper[4838]: I0202 11:39:45.684370 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl_a250321d-30c9-426a-b638-90dd5e9c036d/util/0.log"
Feb 02 11:39:45 crc kubenswrapper[4838]: I0202 11:39:45.831693 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl_a250321d-30c9-426a-b638-90dd5e9c036d/util/0.log"
Feb 02 11:39:45 crc kubenswrapper[4838]: I0202 11:39:45.852900 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl_a250321d-30c9-426a-b638-90dd5e9c036d/pull/0.log"
Feb 02 11:39:45 crc kubenswrapper[4838]: I0202 11:39:45.886612 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl_a250321d-30c9-426a-b638-90dd5e9c036d/pull/0.log"
Feb 02 11:39:46 crc kubenswrapper[4838]: I0202 11:39:46.047303 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl_a250321d-30c9-426a-b638-90dd5e9c036d/util/0.log"
Feb 02 11:39:46 crc kubenswrapper[4838]: I0202 11:39:46.058385 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl_a250321d-30c9-426a-b638-90dd5e9c036d/pull/0.log"
Feb 02 11:39:46 crc kubenswrapper[4838]: I0202 11:39:46.080702 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713j29hl_a250321d-30c9-426a-b638-90dd5e9c036d/extract/0.log"
Feb 02 11:39:46 crc kubenswrapper[4838]: I0202 11:39:46.207131 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tdj7s_6da618e6-95eb-478d-a290-fb44dfef06f7/extract-utilities/0.log"
Feb 02 11:39:46 crc kubenswrapper[4838]: I0202 11:39:46.362554 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tdj7s_6da618e6-95eb-478d-a290-fb44dfef06f7/extract-content/0.log"
Feb 02 11:39:46 crc kubenswrapper[4838]: I0202 11:39:46.362778 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tdj7s_6da618e6-95eb-478d-a290-fb44dfef06f7/extract-utilities/0.log"
Feb 02 11:39:46 crc kubenswrapper[4838]: I0202 11:39:46.368301 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tdj7s_6da618e6-95eb-478d-a290-fb44dfef06f7/extract-content/0.log"
Feb 02 11:39:46 crc kubenswrapper[4838]: I0202 11:39:46.575024 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tdj7s_6da618e6-95eb-478d-a290-fb44dfef06f7/extract-utilities/0.log"
Feb 02 11:39:46 crc kubenswrapper[4838]: I0202 11:39:46.580420 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tdj7s_6da618e6-95eb-478d-a290-fb44dfef06f7/extract-content/0.log"
Feb 02 11:39:46 crc kubenswrapper[4838]: I0202 11:39:46.767516 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dcrh2_8d8db2f1-c01c-4848-923a-f4fb42f7d2be/extract-utilities/0.log"
Feb 02 11:39:46 crc kubenswrapper[4838]: I0202 11:39:46.967859 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tdj7s_6da618e6-95eb-478d-a290-fb44dfef06f7/registry-server/0.log"
Feb 02 11:39:47 crc kubenswrapper[4838]: I0202 11:39:47.015501 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dcrh2_8d8db2f1-c01c-4848-923a-f4fb42f7d2be/extract-content/0.log"
Feb 02 11:39:47 crc kubenswrapper[4838]: I0202 11:39:47.016069 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dcrh2_8d8db2f1-c01c-4848-923a-f4fb42f7d2be/extract-content/0.log"
Feb 02 11:39:47 crc kubenswrapper[4838]: I0202 11:39:47.019136 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dcrh2_8d8db2f1-c01c-4848-923a-f4fb42f7d2be/extract-utilities/0.log"
Feb 02 11:39:47 crc kubenswrapper[4838]: I0202 11:39:47.201861 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dcrh2_8d8db2f1-c01c-4848-923a-f4fb42f7d2be/extract-utilities/0.log"
Feb 02 11:39:47 crc kubenswrapper[4838]: I0202 11:39:47.267024 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dcrh2_8d8db2f1-c01c-4848-923a-f4fb42f7d2be/extract-content/0.log"
Feb 02 11:39:47 crc kubenswrapper[4838]: I0202 11:39:47.434294 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-2kxfd_5214b35a-0948-41f6-b2d1-0dfc43009812/marketplace-operator/0.log"
Feb 02 11:39:47 crc kubenswrapper[4838]: I0202 11:39:47.623521 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2f7cb_d5cef155-b12d-4e9b-81b7-9a224b8fe5b3/extract-utilities/0.log"
Feb 02 11:39:47 crc kubenswrapper[4838]: I0202 11:39:47.680630 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dcrh2_8d8db2f1-c01c-4848-923a-f4fb42f7d2be/registry-server/0.log"
Feb 02 11:39:47 crc kubenswrapper[4838]: I0202 11:39:47.755012 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2f7cb_d5cef155-b12d-4e9b-81b7-9a224b8fe5b3/extract-utilities/0.log"
Feb 02 11:39:47 crc kubenswrapper[4838]: I0202 11:39:47.780508 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2f7cb_d5cef155-b12d-4e9b-81b7-9a224b8fe5b3/extract-content/0.log"
Feb 02 11:39:47 crc kubenswrapper[4838]: I0202 11:39:47.809140 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2f7cb_d5cef155-b12d-4e9b-81b7-9a224b8fe5b3/extract-content/0.log"
Feb 02 11:39:48 crc kubenswrapper[4838]: I0202 11:39:48.051597 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2f7cb_d5cef155-b12d-4e9b-81b7-9a224b8fe5b3/extract-utilities/0.log"
Feb 02 11:39:48 crc kubenswrapper[4838]: I0202 11:39:48.051679 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2f7cb_d5cef155-b12d-4e9b-81b7-9a224b8fe5b3/extract-content/0.log"
Feb 02 11:39:48 crc kubenswrapper[4838]: I0202 11:39:48.090417 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2f7cb_d5cef155-b12d-4e9b-81b7-9a224b8fe5b3/registry-server/0.log"
Feb 02 11:39:48 crc kubenswrapper[4838]: I0202 11:39:48.217830 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-62q4t_71ec42fd-382d-43bd-9353-24e15ac2e795/extract-utilities/0.log"
Feb 02 11:39:48 crc kubenswrapper[4838]: I0202 11:39:48.401462 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-62q4t_71ec42fd-382d-43bd-9353-24e15ac2e795/extract-content/0.log"
Feb 02 11:39:48 crc kubenswrapper[4838]: I0202 11:39:48.402570 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-62q4t_71ec42fd-382d-43bd-9353-24e15ac2e795/extract-utilities/0.log"
Feb 02 11:39:48 crc kubenswrapper[4838]: I0202 11:39:48.415071 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-62q4t_71ec42fd-382d-43bd-9353-24e15ac2e795/extract-content/0.log"
Feb 02 11:39:48 crc kubenswrapper[4838]: I0202 11:39:48.537298 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-62q4t_71ec42fd-382d-43bd-9353-24e15ac2e795/extract-utilities/0.log"
Feb 02 11:39:48 crc kubenswrapper[4838]: I0202 11:39:48.566156 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-62q4t_71ec42fd-382d-43bd-9353-24e15ac2e795/extract-content/0.log"
Feb 02 11:39:49 crc kubenswrapper[4838]: I0202 11:39:49.035060 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-62q4t_71ec42fd-382d-43bd-9353-24e15ac2e795/registry-server/0.log"
Feb 02 11:40:05 crc kubenswrapper[4838]: I0202 11:40:05.679585 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-7vlxg"]
Feb 02 11:40:05 crc kubenswrapper[4838]: E0202 11:40:05.680658 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a19e1d99-5603-4981-9853-785d48c37edd" containerName="extract-utilities"
Feb 02 11:40:05 crc kubenswrapper[4838]: I0202 11:40:05.680677 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="a19e1d99-5603-4981-9853-785d48c37edd" containerName="extract-utilities"
Feb 02 11:40:05 crc kubenswrapper[4838]: E0202 11:40:05.680696 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed8ba392-0709-4915-97e0-df8de5cdb41b" containerName="extract-content"
Feb 02 11:40:05 crc kubenswrapper[4838]: I0202 11:40:05.680703 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed8ba392-0709-4915-97e0-df8de5cdb41b" containerName="extract-content"
Feb 02 11:40:05 crc kubenswrapper[4838]: E0202 11:40:05.680713 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed8ba392-0709-4915-97e0-df8de5cdb41b" containerName="registry-server"
Feb 02 11:40:05 crc kubenswrapper[4838]: I0202 11:40:05.680720 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed8ba392-0709-4915-97e0-df8de5cdb41b" containerName="registry-server"
Feb 02 11:40:05 crc kubenswrapper[4838]: E0202 11:40:05.680746 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a19e1d99-5603-4981-9853-785d48c37edd" containerName="registry-server"
Feb 02 11:40:05 crc kubenswrapper[4838]: I0202 11:40:05.680753 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="a19e1d99-5603-4981-9853-785d48c37edd" containerName="registry-server"
Feb 02 11:40:05 crc kubenswrapper[4838]: E0202 11:40:05.680764 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a19e1d99-5603-4981-9853-785d48c37edd" containerName="extract-content"
Feb 02 11:40:05 crc kubenswrapper[4838]: I0202 11:40:05.680771 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="a19e1d99-5603-4981-9853-785d48c37edd" containerName="extract-content"
Feb 02 11:40:05 crc kubenswrapper[4838]: E0202 11:40:05.680781 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed8ba392-0709-4915-97e0-df8de5cdb41b" containerName="extract-utilities"
Feb 02 11:40:05 crc kubenswrapper[4838]: I0202 11:40:05.680788 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed8ba392-0709-4915-97e0-df8de5cdb41b" containerName="extract-utilities"
Feb 02 11:40:05 crc kubenswrapper[4838]: I0202 11:40:05.680982 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed8ba392-0709-4915-97e0-df8de5cdb41b" containerName="registry-server"
Feb 02 11:40:05 crc kubenswrapper[4838]: I0202 11:40:05.680995 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="a19e1d99-5603-4981-9853-785d48c37edd" containerName="registry-server"
Feb 02 11:40:05 crc kubenswrapper[4838]: I0202 11:40:05.682588 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7vlxg"
Feb 02 11:40:05 crc kubenswrapper[4838]: I0202 11:40:05.697030 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7vlxg"]
Feb 02 11:40:05 crc kubenswrapper[4838]: I0202 11:40:05.837393 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70cfa7e0-c978-4ff6-86a8-f8d00cb5df76-catalog-content\") pod \"certified-operators-7vlxg\" (UID: \"70cfa7e0-c978-4ff6-86a8-f8d00cb5df76\") " pod="openshift-marketplace/certified-operators-7vlxg"
Feb 02 11:40:05 crc kubenswrapper[4838]: I0202 11:40:05.837516 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70cfa7e0-c978-4ff6-86a8-f8d00cb5df76-utilities\") pod \"certified-operators-7vlxg\" (UID: \"70cfa7e0-c978-4ff6-86a8-f8d00cb5df76\") " pod="openshift-marketplace/certified-operators-7vlxg"
Feb 02 11:40:05 crc kubenswrapper[4838]: I0202 11:40:05.837609 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-627rk\" (UniqueName: \"kubernetes.io/projected/70cfa7e0-c978-4ff6-86a8-f8d00cb5df76-kube-api-access-627rk\") pod \"certified-operators-7vlxg\" (UID: \"70cfa7e0-c978-4ff6-86a8-f8d00cb5df76\") " pod="openshift-marketplace/certified-operators-7vlxg"
Feb 02 11:40:05 crc kubenswrapper[4838]: I0202 11:40:05.938720 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70cfa7e0-c978-4ff6-86a8-f8d00cb5df76-utilities\") pod \"certified-operators-7vlxg\" (UID: \"70cfa7e0-c978-4ff6-86a8-f8d00cb5df76\") " pod="openshift-marketplace/certified-operators-7vlxg"
Feb 02 11:40:05 crc kubenswrapper[4838]: I0202 11:40:05.939005 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-627rk\" (UniqueName: \"kubernetes.io/projected/70cfa7e0-c978-4ff6-86a8-f8d00cb5df76-kube-api-access-627rk\") pod \"certified-operators-7vlxg\" (UID: \"70cfa7e0-c978-4ff6-86a8-f8d00cb5df76\") " pod="openshift-marketplace/certified-operators-7vlxg"
Feb 02 11:40:05 crc kubenswrapper[4838]: I0202 11:40:05.939077 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70cfa7e0-c978-4ff6-86a8-f8d00cb5df76-catalog-content\") pod \"certified-operators-7vlxg\" (UID: \"70cfa7e0-c978-4ff6-86a8-f8d00cb5df76\") " pod="openshift-marketplace/certified-operators-7vlxg"
Feb 02 11:40:05 crc kubenswrapper[4838]: I0202 11:40:05.939653 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70cfa7e0-c978-4ff6-86a8-f8d00cb5df76-catalog-content\") pod \"certified-operators-7vlxg\" (UID: \"70cfa7e0-c978-4ff6-86a8-f8d00cb5df76\") " pod="openshift-marketplace/certified-operators-7vlxg"
Feb 02 11:40:05 crc kubenswrapper[4838]: I0202 11:40:05.939775 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70cfa7e0-c978-4ff6-86a8-f8d00cb5df76-utilities\") pod \"certified-operators-7vlxg\" (UID: \"70cfa7e0-c978-4ff6-86a8-f8d00cb5df76\") " pod="openshift-marketplace/certified-operators-7vlxg"
Feb 02 11:40:05 crc kubenswrapper[4838]: I0202 11:40:05.957872 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-627rk\" (UniqueName: \"kubernetes.io/projected/70cfa7e0-c978-4ff6-86a8-f8d00cb5df76-kube-api-access-627rk\") pod \"certified-operators-7vlxg\" (UID: \"70cfa7e0-c978-4ff6-86a8-f8d00cb5df76\") " pod="openshift-marketplace/certified-operators-7vlxg"
Feb 02 11:40:06 crc kubenswrapper[4838]: I0202 11:40:06.001600 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7vlxg"
Feb 02 11:40:06 crc kubenswrapper[4838]: I0202 11:40:06.587787 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7vlxg"]
Feb 02 11:40:07 crc kubenswrapper[4838]: I0202 11:40:07.205715 4838 generic.go:334] "Generic (PLEG): container finished" podID="70cfa7e0-c978-4ff6-86a8-f8d00cb5df76" containerID="43eccab6f117d139e15797187537c9b0895f4e46ac755dbd58caf6819d8098dd" exitCode=0
Feb 02 11:40:07 crc kubenswrapper[4838]: I0202 11:40:07.205958 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7vlxg" event={"ID":"70cfa7e0-c978-4ff6-86a8-f8d00cb5df76","Type":"ContainerDied","Data":"43eccab6f117d139e15797187537c9b0895f4e46ac755dbd58caf6819d8098dd"}
Feb 02 11:40:07 crc kubenswrapper[4838]: I0202 11:40:07.205987 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7vlxg" event={"ID":"70cfa7e0-c978-4ff6-86a8-f8d00cb5df76","Type":"ContainerStarted","Data":"cfa1cf8d31f32bc23efd9eef7f58bfef48c1040e25749c988959c71cfadc8cd1"}
Feb 02 11:40:08 crc kubenswrapper[4838]: I0202 11:40:08.214570 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7vlxg" event={"ID":"70cfa7e0-c978-4ff6-86a8-f8d00cb5df76","Type":"ContainerStarted","Data":"2c2d8979ec51039e32fa3d87782f3e91689dc854c5d42a13de00421eec7bb189"}
Feb 02 11:40:09 crc kubenswrapper[4838]: I0202 11:40:09.224866 4838 generic.go:334] "Generic (PLEG): container finished" podID="70cfa7e0-c978-4ff6-86a8-f8d00cb5df76" containerID="2c2d8979ec51039e32fa3d87782f3e91689dc854c5d42a13de00421eec7bb189" exitCode=0
Feb 02 11:40:09 crc kubenswrapper[4838]: I0202 11:40:09.224913 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7vlxg" event={"ID":"70cfa7e0-c978-4ff6-86a8-f8d00cb5df76","Type":"ContainerDied","Data":"2c2d8979ec51039e32fa3d87782f3e91689dc854c5d42a13de00421eec7bb189"}
Feb 02 11:40:10 crc kubenswrapper[4838]: I0202 11:40:10.233586 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7vlxg" event={"ID":"70cfa7e0-c978-4ff6-86a8-f8d00cb5df76","Type":"ContainerStarted","Data":"c79da1e810ade1a654b5b368b67b74ed590bcd6ebae6f67c91f41958fad10495"}
Feb 02 11:40:10 crc kubenswrapper[4838]: I0202 11:40:10.261002 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7vlxg" podStartSLOduration=2.870372338 podStartE2EDuration="5.260979474s" podCreationTimestamp="2026-02-02 11:40:05 +0000 UTC" firstStartedPulling="2026-02-02 11:40:07.207561912 +0000 UTC m=+2801.544662940" lastFinishedPulling="2026-02-02 11:40:09.598169058 +0000 UTC m=+2803.935270076" observedRunningTime="2026-02-02 11:40:10.250306355 +0000 UTC m=+2804.587407403" watchObservedRunningTime="2026-02-02 11:40:10.260979474 +0000 UTC m=+2804.598080502"
Feb 02 11:40:15 crc kubenswrapper[4838]: I0202 11:40:15.430087 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 02 11:40:15 crc kubenswrapper[4838]: I0202 11:40:15.430667 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 11:40:16 crc kubenswrapper[4838]: I0202 11:40:16.002056 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7vlxg"
Feb 02 11:40:16 crc kubenswrapper[4838]: I0202 11:40:16.002308 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7vlxg"
Feb 02 11:40:16 crc kubenswrapper[4838]: I0202 11:40:16.213322 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-7vlxg"
Feb 02 11:40:16 crc kubenswrapper[4838]: I0202 11:40:16.378704 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7vlxg"
Feb 02 11:40:16 crc kubenswrapper[4838]: I0202 11:40:16.476315 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7vlxg"]
Feb 02 11:40:18 crc kubenswrapper[4838]: I0202 11:40:18.319732 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-7vlxg" podUID="70cfa7e0-c978-4ff6-86a8-f8d00cb5df76" containerName="registry-server" containerID="cri-o://c79da1e810ade1a654b5b368b67b74ed590bcd6ebae6f67c91f41958fad10495" gracePeriod=2
Feb 02 11:40:18 crc kubenswrapper[4838]: I0202 11:40:18.811918 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7vlxg"
Feb 02 11:40:18 crc kubenswrapper[4838]: I0202 11:40:18.903834 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-627rk\" (UniqueName: \"kubernetes.io/projected/70cfa7e0-c978-4ff6-86a8-f8d00cb5df76-kube-api-access-627rk\") pod \"70cfa7e0-c978-4ff6-86a8-f8d00cb5df76\" (UID: \"70cfa7e0-c978-4ff6-86a8-f8d00cb5df76\") "
Feb 02 11:40:18 crc kubenswrapper[4838]: I0202 11:40:18.903989 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70cfa7e0-c978-4ff6-86a8-f8d00cb5df76-utilities\") pod \"70cfa7e0-c978-4ff6-86a8-f8d00cb5df76\" (UID: \"70cfa7e0-c978-4ff6-86a8-f8d00cb5df76\") "
Feb 02 11:40:18 crc kubenswrapper[4838]: I0202 11:40:18.904152 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70cfa7e0-c978-4ff6-86a8-f8d00cb5df76-catalog-content\") pod \"70cfa7e0-c978-4ff6-86a8-f8d00cb5df76\" (UID: \"70cfa7e0-c978-4ff6-86a8-f8d00cb5df76\") "
Feb 02 11:40:18 crc kubenswrapper[4838]: I0202 11:40:18.905881 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70cfa7e0-c978-4ff6-86a8-f8d00cb5df76-utilities" (OuterVolumeSpecName: "utilities") pod "70cfa7e0-c978-4ff6-86a8-f8d00cb5df76" (UID: "70cfa7e0-c978-4ff6-86a8-f8d00cb5df76"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:40:18 crc kubenswrapper[4838]: I0202 11:40:18.931828 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70cfa7e0-c978-4ff6-86a8-f8d00cb5df76-kube-api-access-627rk" (OuterVolumeSpecName: "kube-api-access-627rk") pod "70cfa7e0-c978-4ff6-86a8-f8d00cb5df76" (UID: "70cfa7e0-c978-4ff6-86a8-f8d00cb5df76"). InnerVolumeSpecName "kube-api-access-627rk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:40:18 crc kubenswrapper[4838]: I0202 11:40:18.963266 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70cfa7e0-c978-4ff6-86a8-f8d00cb5df76-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "70cfa7e0-c978-4ff6-86a8-f8d00cb5df76" (UID: "70cfa7e0-c978-4ff6-86a8-f8d00cb5df76"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:40:19 crc kubenswrapper[4838]: I0202 11:40:19.007046 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70cfa7e0-c978-4ff6-86a8-f8d00cb5df76-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 02 11:40:19 crc kubenswrapper[4838]: I0202 11:40:19.007098 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-627rk\" (UniqueName: \"kubernetes.io/projected/70cfa7e0-c978-4ff6-86a8-f8d00cb5df76-kube-api-access-627rk\") on node \"crc\" DevicePath \"\""
Feb 02 11:40:19 crc kubenswrapper[4838]: I0202 11:40:19.007120 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70cfa7e0-c978-4ff6-86a8-f8d00cb5df76-utilities\") on node \"crc\" DevicePath \"\""
Feb 02 11:40:19 crc kubenswrapper[4838]: I0202 11:40:19.330835 4838 generic.go:334] "Generic (PLEG): container finished" podID="70cfa7e0-c978-4ff6-86a8-f8d00cb5df76" containerID="c79da1e810ade1a654b5b368b67b74ed590bcd6ebae6f67c91f41958fad10495" exitCode=0
Feb 02 11:40:19 crc kubenswrapper[4838]: I0202 11:40:19.330877 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7vlxg" event={"ID":"70cfa7e0-c978-4ff6-86a8-f8d00cb5df76","Type":"ContainerDied","Data":"c79da1e810ade1a654b5b368b67b74ed590bcd6ebae6f67c91f41958fad10495"}
Feb 02 11:40:19 crc kubenswrapper[4838]: I0202 11:40:19.330907 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7vlxg" event={"ID":"70cfa7e0-c978-4ff6-86a8-f8d00cb5df76","Type":"ContainerDied","Data":"cfa1cf8d31f32bc23efd9eef7f58bfef48c1040e25749c988959c71cfadc8cd1"}
Feb 02 11:40:19 crc kubenswrapper[4838]: I0202 11:40:19.330913 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7vlxg"
Feb 02 11:40:19 crc kubenswrapper[4838]: I0202 11:40:19.330924 4838 scope.go:117] "RemoveContainer" containerID="c79da1e810ade1a654b5b368b67b74ed590bcd6ebae6f67c91f41958fad10495"
Feb 02 11:40:19 crc kubenswrapper[4838]: I0202 11:40:19.348924 4838 scope.go:117] "RemoveContainer" containerID="2c2d8979ec51039e32fa3d87782f3e91689dc854c5d42a13de00421eec7bb189"
Feb 02 11:40:19 crc kubenswrapper[4838]: I0202 11:40:19.364433 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7vlxg"]
Feb 02 11:40:19 crc kubenswrapper[4838]: I0202 11:40:19.375506 4838 scope.go:117] "RemoveContainer" containerID="43eccab6f117d139e15797187537c9b0895f4e46ac755dbd58caf6819d8098dd"
Feb 02 11:40:19 crc kubenswrapper[4838]: I0202 11:40:19.378146 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-7vlxg"]
Feb 02 11:40:19 crc kubenswrapper[4838]: I0202 11:40:19.413671 4838 scope.go:117] "RemoveContainer" containerID="c79da1e810ade1a654b5b368b67b74ed590bcd6ebae6f67c91f41958fad10495"
Feb 02 11:40:19 crc kubenswrapper[4838]: E0202 11:40:19.415080 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c79da1e810ade1a654b5b368b67b74ed590bcd6ebae6f67c91f41958fad10495\": container with ID starting with c79da1e810ade1a654b5b368b67b74ed590bcd6ebae6f67c91f41958fad10495 not found: ID does not exist" containerID="c79da1e810ade1a654b5b368b67b74ed590bcd6ebae6f67c91f41958fad10495"
Feb 02 11:40:19 crc kubenswrapper[4838]: I0202 11:40:19.415120 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c79da1e810ade1a654b5b368b67b74ed590bcd6ebae6f67c91f41958fad10495"} err="failed to get container status \"c79da1e810ade1a654b5b368b67b74ed590bcd6ebae6f67c91f41958fad10495\": rpc error: code = NotFound desc = could not find container \"c79da1e810ade1a654b5b368b67b74ed590bcd6ebae6f67c91f41958fad10495\": container with ID starting with c79da1e810ade1a654b5b368b67b74ed590bcd6ebae6f67c91f41958fad10495 not found: ID does not exist"
Feb 02 11:40:19 crc kubenswrapper[4838]: I0202 11:40:19.415155 4838 scope.go:117] "RemoveContainer" containerID="2c2d8979ec51039e32fa3d87782f3e91689dc854c5d42a13de00421eec7bb189"
Feb 02 11:40:19 crc kubenswrapper[4838]: E0202 11:40:19.415577 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c2d8979ec51039e32fa3d87782f3e91689dc854c5d42a13de00421eec7bb189\": container with ID starting with 2c2d8979ec51039e32fa3d87782f3e91689dc854c5d42a13de00421eec7bb189 not found: ID does not exist" containerID="2c2d8979ec51039e32fa3d87782f3e91689dc854c5d42a13de00421eec7bb189"
Feb 02 11:40:19 crc kubenswrapper[4838]: I0202 11:40:19.415637 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c2d8979ec51039e32fa3d87782f3e91689dc854c5d42a13de00421eec7bb189"} err="failed to get container status \"2c2d8979ec51039e32fa3d87782f3e91689dc854c5d42a13de00421eec7bb189\": rpc error: code = NotFound desc = could not find container \"2c2d8979ec51039e32fa3d87782f3e91689dc854c5d42a13de00421eec7bb189\": container with ID starting with 2c2d8979ec51039e32fa3d87782f3e91689dc854c5d42a13de00421eec7bb189 not found: ID does not exist"
Feb 02 11:40:19 crc kubenswrapper[4838]: I0202 11:40:19.415664 4838 scope.go:117] "RemoveContainer" containerID="43eccab6f117d139e15797187537c9b0895f4e46ac755dbd58caf6819d8098dd"
Feb 02 11:40:19 crc kubenswrapper[4838]: E0202 11:40:19.416020 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43eccab6f117d139e15797187537c9b0895f4e46ac755dbd58caf6819d8098dd\": container with ID starting with 43eccab6f117d139e15797187537c9b0895f4e46ac755dbd58caf6819d8098dd not found: ID does not exist" containerID="43eccab6f117d139e15797187537c9b0895f4e46ac755dbd58caf6819d8098dd"
Feb 02 11:40:19 crc kubenswrapper[4838]: I0202 11:40:19.416059 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43eccab6f117d139e15797187537c9b0895f4e46ac755dbd58caf6819d8098dd"} err="failed to get container status \"43eccab6f117d139e15797187537c9b0895f4e46ac755dbd58caf6819d8098dd\": rpc error: code = NotFound desc = could not find container \"43eccab6f117d139e15797187537c9b0895f4e46ac755dbd58caf6819d8098dd\": container with ID starting with 43eccab6f117d139e15797187537c9b0895f4e46ac755dbd58caf6819d8098dd not found: ID does not exist"
Feb 02 11:40:20 crc kubenswrapper[4838]: I0202 11:40:20.524040 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70cfa7e0-c978-4ff6-86a8-f8d00cb5df76" path="/var/lib/kubelet/pods/70cfa7e0-c978-4ff6-86a8-f8d00cb5df76/volumes"
Feb 02 11:40:45 crc kubenswrapper[4838]: I0202 11:40:45.430092 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 02 11:40:45 crc kubenswrapper[4838]: I0202 11:40:45.430657 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 11:40:52 crc kubenswrapper[4838]: I0202 11:40:52.670919 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5mrm7"]
Feb 02 11:40:52 crc kubenswrapper[4838]: E0202 11:40:52.671896 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70cfa7e0-c978-4ff6-86a8-f8d00cb5df76" containerName="registry-server"
Feb 02 11:40:52 crc kubenswrapper[4838]: I0202 11:40:52.671910 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="70cfa7e0-c978-4ff6-86a8-f8d00cb5df76" containerName="registry-server"
Feb 02 11:40:52 crc kubenswrapper[4838]: E0202 11:40:52.671925 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70cfa7e0-c978-4ff6-86a8-f8d00cb5df76" containerName="extract-utilities"
Feb 02 11:40:52 crc kubenswrapper[4838]: I0202 11:40:52.671932 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="70cfa7e0-c978-4ff6-86a8-f8d00cb5df76" containerName="extract-utilities"
Feb 02 11:40:52 crc kubenswrapper[4838]: E0202 11:40:52.671947 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70cfa7e0-c978-4ff6-86a8-f8d00cb5df76" containerName="extract-content"
Feb 02 11:40:52 crc kubenswrapper[4838]: I0202 11:40:52.671953 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="70cfa7e0-c978-4ff6-86a8-f8d00cb5df76" containerName="extract-content"
Feb 02 11:40:52 crc kubenswrapper[4838]: I0202 11:40:52.672164 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="70cfa7e0-c978-4ff6-86a8-f8d00cb5df76" containerName="registry-server"
Feb 02 11:40:52 crc kubenswrapper[4838]: I0202 11:40:52.673506 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5mrm7"
Feb 02 11:40:52 crc kubenswrapper[4838]: I0202 11:40:52.683060 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5mrm7"]
Feb 02 11:40:52 crc kubenswrapper[4838]: I0202 11:40:52.736842 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68bafdd1-d2de-4cd7-8541-1ac5d0e49de7-catalog-content\") pod \"community-operators-5mrm7\" (UID: \"68bafdd1-d2de-4cd7-8541-1ac5d0e49de7\") " pod="openshift-marketplace/community-operators-5mrm7"
Feb 02 11:40:52 crc kubenswrapper[4838]: I0202 11:40:52.737193 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68bafdd1-d2de-4cd7-8541-1ac5d0e49de7-utilities\") pod \"community-operators-5mrm7\" (UID: \"68bafdd1-d2de-4cd7-8541-1ac5d0e49de7\") " pod="openshift-marketplace/community-operators-5mrm7"
Feb 02 11:40:52 crc kubenswrapper[4838]: I0202 11:40:52.737291 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4g7w\" (UniqueName: \"kubernetes.io/projected/68bafdd1-d2de-4cd7-8541-1ac5d0e49de7-kube-api-access-h4g7w\") pod \"community-operators-5mrm7\" (UID: \"68bafdd1-d2de-4cd7-8541-1ac5d0e49de7\") " pod="openshift-marketplace/community-operators-5mrm7"
Feb 02 11:40:52 crc kubenswrapper[4838]: I0202 11:40:52.838784 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4g7w\" (UniqueName: \"kubernetes.io/projected/68bafdd1-d2de-4cd7-8541-1ac5d0e49de7-kube-api-access-h4g7w\") pod \"community-operators-5mrm7\" (UID: \"68bafdd1-d2de-4cd7-8541-1ac5d0e49de7\") " pod="openshift-marketplace/community-operators-5mrm7"
Feb 02 11:40:52 crc kubenswrapper[4838]: I0202 11:40:52.838998 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68bafdd1-d2de-4cd7-8541-1ac5d0e49de7-catalog-content\") pod \"community-operators-5mrm7\" (UID: \"68bafdd1-d2de-4cd7-8541-1ac5d0e49de7\") " pod="openshift-marketplace/community-operators-5mrm7"
Feb 02 11:40:52 crc kubenswrapper[4838]: I0202 11:40:52.839047 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68bafdd1-d2de-4cd7-8541-1ac5d0e49de7-utilities\") pod \"community-operators-5mrm7\" (UID: \"68bafdd1-d2de-4cd7-8541-1ac5d0e49de7\") " pod="openshift-marketplace/community-operators-5mrm7"
Feb 02 11:40:52 crc kubenswrapper[4838]: I0202 11:40:52.839722 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68bafdd1-d2de-4cd7-8541-1ac5d0e49de7-utilities\") pod \"community-operators-5mrm7\" (UID: \"68bafdd1-d2de-4cd7-8541-1ac5d0e49de7\") " pod="openshift-marketplace/community-operators-5mrm7"
Feb 02 11:40:52 crc kubenswrapper[4838]: I0202 11:40:52.840315 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68bafdd1-d2de-4cd7-8541-1ac5d0e49de7-catalog-content\") pod \"community-operators-5mrm7\" (UID: \"68bafdd1-d2de-4cd7-8541-1ac5d0e49de7\") " pod="openshift-marketplace/community-operators-5mrm7"
Feb 02 11:40:52 crc kubenswrapper[4838]: I0202 11:40:52.868540 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4g7w\" (UniqueName: \"kubernetes.io/projected/68bafdd1-d2de-4cd7-8541-1ac5d0e49de7-kube-api-access-h4g7w\") pod \"community-operators-5mrm7\" (UID: \"68bafdd1-d2de-4cd7-8541-1ac5d0e49de7\") " pod="openshift-marketplace/community-operators-5mrm7"
Feb 02 11:40:53 crc kubenswrapper[4838]: I0202 11:40:53.012802 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5mrm7"
Feb 02 11:40:53 crc kubenswrapper[4838]: I0202 11:40:53.597308 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5mrm7"]
Feb 02 11:40:53 crc kubenswrapper[4838]: I0202 11:40:53.659342 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5mrm7" event={"ID":"68bafdd1-d2de-4cd7-8541-1ac5d0e49de7","Type":"ContainerStarted","Data":"f27564902aebaddd2692aa26730928a850e40f9ba11ec2cf1bc4770bf126dc21"}
Feb 02 11:40:54 crc kubenswrapper[4838]: I0202 11:40:54.667136 4838 generic.go:334] "Generic (PLEG): container finished" podID="68bafdd1-d2de-4cd7-8541-1ac5d0e49de7" containerID="28f776d72c8dfbe16e7c5aa615c64133e8c650d40f92f340917fd96eee67695f" exitCode=0
Feb 02 11:40:54 crc kubenswrapper[4838]: I0202 11:40:54.667383 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5mrm7" event={"ID":"68bafdd1-d2de-4cd7-8541-1ac5d0e49de7","Type":"ContainerDied","Data":"28f776d72c8dfbe16e7c5aa615c64133e8c650d40f92f340917fd96eee67695f"}
Feb 02 11:40:56 crc kubenswrapper[4838]: I0202 11:40:56.697978 4838 generic.go:334] "Generic (PLEG): container finished" podID="68bafdd1-d2de-4cd7-8541-1ac5d0e49de7" containerID="896fe671d48c863b8768737107948008b81bc782b79e2ecb7b2f42d29a569c53" exitCode=0
Feb 02 11:40:56 crc kubenswrapper[4838]: I0202 11:40:56.698088 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5mrm7" event={"ID":"68bafdd1-d2de-4cd7-8541-1ac5d0e49de7","Type":"ContainerDied","Data":"896fe671d48c863b8768737107948008b81bc782b79e2ecb7b2f42d29a569c53"}
Feb 02 11:40:57 crc kubenswrapper[4838]: I0202 11:40:57.708937 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5mrm7" event={"ID":"68bafdd1-d2de-4cd7-8541-1ac5d0e49de7","Type":"ContainerStarted","Data":"11eafb34c6644cbb86abc9f7f2ac34ed0c30baec2e3faf69204d9f1e508a1e67"}
Feb 02 11:40:57 crc kubenswrapper[4838]: I0202 11:40:57.741397 4838 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5mrm7" podStartSLOduration=3.325767376 podStartE2EDuration="5.741379718s" podCreationTimestamp="2026-02-02 11:40:52 +0000 UTC" firstStartedPulling="2026-02-02 11:40:54.669072546 +0000 UTC m=+2849.006173574" lastFinishedPulling="2026-02-02 11:40:57.084684888 +0000 UTC m=+2851.421785916" observedRunningTime="2026-02-02 11:40:57.731363947 +0000 UTC m=+2852.068464985" watchObservedRunningTime="2026-02-02 11:40:57.741379718 +0000 UTC m=+2852.078480746"
Feb 02 11:41:03 crc kubenswrapper[4838]: I0202 11:41:03.013525 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5mrm7"
Feb 02 11:41:03 crc kubenswrapper[4838]: I0202 11:41:03.013962 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5mrm7"
Feb 02 11:41:03 crc kubenswrapper[4838]: I0202 11:41:03.075111 4838 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5mrm7"
Feb 02 11:41:03 crc kubenswrapper[4838]: I0202 11:41:03.806432 4838 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5mrm7"
Feb 02 11:41:03 crc kubenswrapper[4838]: I0202 11:41:03.851498 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5mrm7"]
Feb 02 11:41:05 crc kubenswrapper[4838]: I0202 11:41:05.764859 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5mrm7" podUID="68bafdd1-d2de-4cd7-8541-1ac5d0e49de7" containerName="registry-server" containerID="cri-o://11eafb34c6644cbb86abc9f7f2ac34ed0c30baec2e3faf69204d9f1e508a1e67" gracePeriod=2
Feb 02 11:41:06 crc kubenswrapper[4838]: I0202 11:41:06.233644 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5mrm7"
Feb 02 11:41:06 crc kubenswrapper[4838]: I0202 11:41:06.332154 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68bafdd1-d2de-4cd7-8541-1ac5d0e49de7-catalog-content\") pod \"68bafdd1-d2de-4cd7-8541-1ac5d0e49de7\" (UID: \"68bafdd1-d2de-4cd7-8541-1ac5d0e49de7\") "
Feb 02 11:41:06 crc kubenswrapper[4838]: I0202 11:41:06.332381 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4g7w\" (UniqueName: \"kubernetes.io/projected/68bafdd1-d2de-4cd7-8541-1ac5d0e49de7-kube-api-access-h4g7w\") pod \"68bafdd1-d2de-4cd7-8541-1ac5d0e49de7\" (UID: \"68bafdd1-d2de-4cd7-8541-1ac5d0e49de7\") "
Feb 02 11:41:06 crc kubenswrapper[4838]: I0202 11:41:06.332406 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68bafdd1-d2de-4cd7-8541-1ac5d0e49de7-utilities\") pod \"68bafdd1-d2de-4cd7-8541-1ac5d0e49de7\" (UID: \"68bafdd1-d2de-4cd7-8541-1ac5d0e49de7\") "
Feb 02 11:41:06 crc kubenswrapper[4838]: I0202 11:41:06.338244 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68bafdd1-d2de-4cd7-8541-1ac5d0e49de7-kube-api-access-h4g7w" (OuterVolumeSpecName: "kube-api-access-h4g7w") pod "68bafdd1-d2de-4cd7-8541-1ac5d0e49de7" (UID: "68bafdd1-d2de-4cd7-8541-1ac5d0e49de7"). InnerVolumeSpecName "kube-api-access-h4g7w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:41:06 crc kubenswrapper[4838]: I0202 11:41:06.338301 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68bafdd1-d2de-4cd7-8541-1ac5d0e49de7-utilities" (OuterVolumeSpecName: "utilities") pod "68bafdd1-d2de-4cd7-8541-1ac5d0e49de7" (UID: "68bafdd1-d2de-4cd7-8541-1ac5d0e49de7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:41:06 crc kubenswrapper[4838]: I0202 11:41:06.391114 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68bafdd1-d2de-4cd7-8541-1ac5d0e49de7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "68bafdd1-d2de-4cd7-8541-1ac5d0e49de7" (UID: "68bafdd1-d2de-4cd7-8541-1ac5d0e49de7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:41:06 crc kubenswrapper[4838]: I0202 11:41:06.436119 4838 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68bafdd1-d2de-4cd7-8541-1ac5d0e49de7-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 02 11:41:06 crc kubenswrapper[4838]: I0202 11:41:06.436157 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4g7w\" (UniqueName: \"kubernetes.io/projected/68bafdd1-d2de-4cd7-8541-1ac5d0e49de7-kube-api-access-h4g7w\") on node \"crc\" DevicePath \"\""
Feb 02 11:41:06 crc kubenswrapper[4838]: I0202 11:41:06.436172 4838 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68bafdd1-d2de-4cd7-8541-1ac5d0e49de7-utilities\") on node \"crc\" DevicePath \"\""
Feb 02 11:41:06 crc kubenswrapper[4838]: I0202 11:41:06.778046 4838 generic.go:334] "Generic (PLEG): container finished" podID="68bafdd1-d2de-4cd7-8541-1ac5d0e49de7" containerID="11eafb34c6644cbb86abc9f7f2ac34ed0c30baec2e3faf69204d9f1e508a1e67" exitCode=0
Feb 02 11:41:06 crc kubenswrapper[4838]: I0202 11:41:06.778102 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5mrm7" event={"ID":"68bafdd1-d2de-4cd7-8541-1ac5d0e49de7","Type":"ContainerDied","Data":"11eafb34c6644cbb86abc9f7f2ac34ed0c30baec2e3faf69204d9f1e508a1e67"}
Feb 02 11:41:06 crc kubenswrapper[4838]: I0202 11:41:06.778120 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5mrm7"
Feb 02 11:41:06 crc kubenswrapper[4838]: I0202 11:41:06.778136 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5mrm7" event={"ID":"68bafdd1-d2de-4cd7-8541-1ac5d0e49de7","Type":"ContainerDied","Data":"f27564902aebaddd2692aa26730928a850e40f9ba11ec2cf1bc4770bf126dc21"}
Feb 02 11:41:06 crc kubenswrapper[4838]: I0202 11:41:06.778168 4838 scope.go:117] "RemoveContainer" containerID="11eafb34c6644cbb86abc9f7f2ac34ed0c30baec2e3faf69204d9f1e508a1e67"
Feb 02 11:41:06 crc kubenswrapper[4838]: I0202 11:41:06.803459 4838 scope.go:117] "RemoveContainer" containerID="896fe671d48c863b8768737107948008b81bc782b79e2ecb7b2f42d29a569c53"
Feb 02 11:41:06 crc kubenswrapper[4838]: I0202 11:41:06.823795 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5mrm7"]
Feb 02 11:41:06 crc kubenswrapper[4838]: I0202 11:41:06.824205 4838 scope.go:117] "RemoveContainer" containerID="28f776d72c8dfbe16e7c5aa615c64133e8c650d40f92f340917fd96eee67695f"
Feb 02 11:41:06 crc kubenswrapper[4838]: I0202 11:41:06.832048 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5mrm7"]
Feb 02 11:41:06 crc kubenswrapper[4838]: I0202 11:41:06.867581 4838 scope.go:117] "RemoveContainer" containerID="11eafb34c6644cbb86abc9f7f2ac34ed0c30baec2e3faf69204d9f1e508a1e67"
Feb 02 11:41:06 crc kubenswrapper[4838]: E0202 11:41:06.867979 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11eafb34c6644cbb86abc9f7f2ac34ed0c30baec2e3faf69204d9f1e508a1e67\": container with ID starting with 11eafb34c6644cbb86abc9f7f2ac34ed0c30baec2e3faf69204d9f1e508a1e67 not found: ID does not exist" containerID="11eafb34c6644cbb86abc9f7f2ac34ed0c30baec2e3faf69204d9f1e508a1e67"
Feb 02 11:41:06 crc kubenswrapper[4838]: I0202 11:41:06.868021 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11eafb34c6644cbb86abc9f7f2ac34ed0c30baec2e3faf69204d9f1e508a1e67"} err="failed to get container status \"11eafb34c6644cbb86abc9f7f2ac34ed0c30baec2e3faf69204d9f1e508a1e67\": rpc error: code = NotFound desc = could not find container \"11eafb34c6644cbb86abc9f7f2ac34ed0c30baec2e3faf69204d9f1e508a1e67\": container with ID starting with 11eafb34c6644cbb86abc9f7f2ac34ed0c30baec2e3faf69204d9f1e508a1e67 not found: ID does not exist"
Feb 02 11:41:06 crc kubenswrapper[4838]: I0202 11:41:06.868046 4838 scope.go:117] "RemoveContainer" containerID="896fe671d48c863b8768737107948008b81bc782b79e2ecb7b2f42d29a569c53"
Feb 02 11:41:06 crc kubenswrapper[4838]: E0202 11:41:06.868265 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"896fe671d48c863b8768737107948008b81bc782b79e2ecb7b2f42d29a569c53\": container with ID starting with 896fe671d48c863b8768737107948008b81bc782b79e2ecb7b2f42d29a569c53 not found: ID does not exist" containerID="896fe671d48c863b8768737107948008b81bc782b79e2ecb7b2f42d29a569c53"
Feb 02 11:41:06 crc kubenswrapper[4838]: I0202 11:41:06.868281 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"896fe671d48c863b8768737107948008b81bc782b79e2ecb7b2f42d29a569c53"} err="failed to get container status \"896fe671d48c863b8768737107948008b81bc782b79e2ecb7b2f42d29a569c53\": rpc error: code = NotFound desc = could not find container \"896fe671d48c863b8768737107948008b81bc782b79e2ecb7b2f42d29a569c53\": container with ID starting with 896fe671d48c863b8768737107948008b81bc782b79e2ecb7b2f42d29a569c53 not found: ID does not exist"
Feb 02 11:41:06 crc kubenswrapper[4838]: I0202 11:41:06.868294 4838 scope.go:117] "RemoveContainer" containerID="28f776d72c8dfbe16e7c5aa615c64133e8c650d40f92f340917fd96eee67695f"
Feb 02 11:41:06 crc kubenswrapper[4838]: E0202 11:41:06.868455 4838 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28f776d72c8dfbe16e7c5aa615c64133e8c650d40f92f340917fd96eee67695f\": container with ID starting with 28f776d72c8dfbe16e7c5aa615c64133e8c650d40f92f340917fd96eee67695f not found: ID does not exist" containerID="28f776d72c8dfbe16e7c5aa615c64133e8c650d40f92f340917fd96eee67695f"
Feb 02 11:41:06 crc kubenswrapper[4838]: I0202 11:41:06.868472 4838 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28f776d72c8dfbe16e7c5aa615c64133e8c650d40f92f340917fd96eee67695f"} err="failed to get container status \"28f776d72c8dfbe16e7c5aa615c64133e8c650d40f92f340917fd96eee67695f\": rpc error: code = NotFound desc = could not find container \"28f776d72c8dfbe16e7c5aa615c64133e8c650d40f92f340917fd96eee67695f\": container with ID starting with 28f776d72c8dfbe16e7c5aa615c64133e8c650d40f92f340917fd96eee67695f not found: ID does not exist"
Feb 02 11:41:08 crc kubenswrapper[4838]: I0202 11:41:08.518389 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68bafdd1-d2de-4cd7-8541-1ac5d0e49de7" path="/var/lib/kubelet/pods/68bafdd1-d2de-4cd7-8541-1ac5d0e49de7/volumes"
Feb 02 11:41:15 crc kubenswrapper[4838]: I0202 11:41:15.429707 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 02 11:41:15 crc kubenswrapper[4838]: I0202 11:41:15.430370 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 11:41:15 crc kubenswrapper[4838]: I0202 11:41:15.430428 4838 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv"
Feb 02 11:41:15 crc kubenswrapper[4838]: I0202 11:41:15.431270 4838 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c9402ad06463da91e1442dcbaf0f24edbf39af95cfe72e53a949140dfa1034a3"} pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 02 11:41:15 crc kubenswrapper[4838]: I0202 11:41:15.431338 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" containerID="cri-o://c9402ad06463da91e1442dcbaf0f24edbf39af95cfe72e53a949140dfa1034a3" gracePeriod=600
Feb 02 11:41:15 crc kubenswrapper[4838]: I0202 11:41:15.862116 4838 generic.go:334] "Generic (PLEG): container finished" podID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerID="c9402ad06463da91e1442dcbaf0f24edbf39af95cfe72e53a949140dfa1034a3" exitCode=0
Feb 02 11:41:15 crc kubenswrapper[4838]: I0202 11:41:15.862352 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" event={"ID":"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce","Type":"ContainerDied","Data":"c9402ad06463da91e1442dcbaf0f24edbf39af95cfe72e53a949140dfa1034a3"}
Feb 02 11:41:15 crc kubenswrapper[4838]: I0202 11:41:15.862383 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" event={"ID":"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce","Type":"ContainerStarted","Data":"50f5ae09aeebc172928bc46b6909f71e360f73d5ed00ab1cdfd659a8d6bf2c78"}
Feb 02 11:41:15 crc kubenswrapper[4838]: I0202 11:41:15.862401 4838 scope.go:117] "RemoveContainer" containerID="fad94ad567e1d751778aaac2371512df907bde50f50f515bf8b42febb5cd2980"
Feb 02 11:41:27 crc kubenswrapper[4838]: I0202 11:41:27.974689 4838 generic.go:334] "Generic (PLEG): container finished" podID="ddff32a7-e027-4d2e-b77f-c7053ae36c59" containerID="51815ca6efcdba12f8e9d68a2e89ffee60df54c9aeb03a1f751a9fbea7403fbf" exitCode=0
Feb 02 11:41:27 crc kubenswrapper[4838]: I0202 11:41:27.975282 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cs9pd/must-gather-9qprb" event={"ID":"ddff32a7-e027-4d2e-b77f-c7053ae36c59","Type":"ContainerDied","Data":"51815ca6efcdba12f8e9d68a2e89ffee60df54c9aeb03a1f751a9fbea7403fbf"}
Feb 02 11:41:27 crc kubenswrapper[4838]: I0202 11:41:27.976002 4838 scope.go:117] "RemoveContainer" containerID="51815ca6efcdba12f8e9d68a2e89ffee60df54c9aeb03a1f751a9fbea7403fbf"
Feb 02 11:41:28 crc kubenswrapper[4838]: I0202 11:41:28.892414 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-cs9pd_must-gather-9qprb_ddff32a7-e027-4d2e-b77f-c7053ae36c59/gather/0.log"
Feb 02 11:41:33 crc kubenswrapper[4838]: E0202 11:41:33.574667 4838 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.241:54526->38.102.83.241:43415: write tcp 38.102.83.241:54526->38.102.83.241:43415: write: broken pipe
Feb 02 11:41:39 crc kubenswrapper[4838]: I0202 11:41:39.822077 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-cs9pd/must-gather-9qprb"]
Feb 02 11:41:39 crc kubenswrapper[4838]: I0202 11:41:39.824356 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-cs9pd/must-gather-9qprb" podUID="ddff32a7-e027-4d2e-b77f-c7053ae36c59" containerName="copy" containerID="cri-o://13e93b89d3635aa8b45d0fa03b67c813f43b738aa898556bfca86c4416446edd" gracePeriod=2
Feb 02 11:41:39 crc kubenswrapper[4838]: I0202 11:41:39.830420 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-cs9pd/must-gather-9qprb"]
Feb 02 11:41:40 crc kubenswrapper[4838]: I0202 11:41:40.099903 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-cs9pd_must-gather-9qprb_ddff32a7-e027-4d2e-b77f-c7053ae36c59/copy/0.log"
Feb 02 11:41:40 crc kubenswrapper[4838]: I0202 11:41:40.100669 4838 generic.go:334] "Generic (PLEG): container finished" podID="ddff32a7-e027-4d2e-b77f-c7053ae36c59" containerID="13e93b89d3635aa8b45d0fa03b67c813f43b738aa898556bfca86c4416446edd" exitCode=143
Feb 02 11:41:40 crc kubenswrapper[4838]: I0202 11:41:40.363447 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-cs9pd_must-gather-9qprb_ddff32a7-e027-4d2e-b77f-c7053ae36c59/copy/0.log"
Feb 02 11:41:40 crc kubenswrapper[4838]: I0202 11:41:40.364003 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cs9pd/must-gather-9qprb"
Feb 02 11:41:40 crc kubenswrapper[4838]: I0202 11:41:40.450292 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ddff32a7-e027-4d2e-b77f-c7053ae36c59-must-gather-output\") pod \"ddff32a7-e027-4d2e-b77f-c7053ae36c59\" (UID: \"ddff32a7-e027-4d2e-b77f-c7053ae36c59\") "
Feb 02 11:41:40 crc kubenswrapper[4838]: I0202 11:41:40.450395 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pkvn9\" (UniqueName: \"kubernetes.io/projected/ddff32a7-e027-4d2e-b77f-c7053ae36c59-kube-api-access-pkvn9\") pod \"ddff32a7-e027-4d2e-b77f-c7053ae36c59\" (UID: \"ddff32a7-e027-4d2e-b77f-c7053ae36c59\") "
Feb 02 11:41:40 crc kubenswrapper[4838]: I0202 11:41:40.463544 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddff32a7-e027-4d2e-b77f-c7053ae36c59-kube-api-access-pkvn9" (OuterVolumeSpecName: "kube-api-access-pkvn9") pod "ddff32a7-e027-4d2e-b77f-c7053ae36c59" (UID: "ddff32a7-e027-4d2e-b77f-c7053ae36c59"). InnerVolumeSpecName "kube-api-access-pkvn9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 11:41:40 crc kubenswrapper[4838]: I0202 11:41:40.553102 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pkvn9\" (UniqueName: \"kubernetes.io/projected/ddff32a7-e027-4d2e-b77f-c7053ae36c59-kube-api-access-pkvn9\") on node \"crc\" DevicePath \"\""
Feb 02 11:41:40 crc kubenswrapper[4838]: I0202 11:41:40.596521 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ddff32a7-e027-4d2e-b77f-c7053ae36c59-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "ddff32a7-e027-4d2e-b77f-c7053ae36c59" (UID: "ddff32a7-e027-4d2e-b77f-c7053ae36c59"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 11:41:40 crc kubenswrapper[4838]: I0202 11:41:40.655238 4838 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ddff32a7-e027-4d2e-b77f-c7053ae36c59-must-gather-output\") on node \"crc\" DevicePath \"\""
Feb 02 11:41:41 crc kubenswrapper[4838]: I0202 11:41:41.111097 4838 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-cs9pd_must-gather-9qprb_ddff32a7-e027-4d2e-b77f-c7053ae36c59/copy/0.log"
Feb 02 11:41:41 crc kubenswrapper[4838]: I0202 11:41:41.112238 4838 scope.go:117] "RemoveContainer" containerID="13e93b89d3635aa8b45d0fa03b67c813f43b738aa898556bfca86c4416446edd"
Feb 02 11:41:41 crc kubenswrapper[4838]: I0202 11:41:41.112321 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cs9pd/must-gather-9qprb"
Feb 02 11:41:41 crc kubenswrapper[4838]: I0202 11:41:41.144796 4838 scope.go:117] "RemoveContainer" containerID="51815ca6efcdba12f8e9d68a2e89ffee60df54c9aeb03a1f751a9fbea7403fbf"
Feb 02 11:41:42 crc kubenswrapper[4838]: I0202 11:41:42.517570 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ddff32a7-e027-4d2e-b77f-c7053ae36c59" path="/var/lib/kubelet/pods/ddff32a7-e027-4d2e-b77f-c7053ae36c59/volumes"
Feb 02 11:42:18 crc kubenswrapper[4838]: I0202 11:42:17.559999 4838 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-pms7g" podUID="a17b67e7-df64-4f12-8e78-c52068d2b1df" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.78:8081/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Feb 02 11:42:18 crc kubenswrapper[4838]: I0202 11:42:17.602134 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-pms7g" podUID="a17b67e7-df64-4f12-8e78-c52068d2b1df" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.78:8081/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Feb 02 11:42:50 crc kubenswrapper[4838]: I0202 11:42:50.215687 4838 scope.go:117] "RemoveContainer" containerID="7ea58378486b0ca0cdf4ba3e7124dd03bcec9ef75d9d04a8641e99dec371ee0e"
Feb 02 11:43:15 crc kubenswrapper[4838]: I0202 11:43:15.429988 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 02 11:43:15 crc kubenswrapper[4838]: I0202 11:43:15.430559 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 11:43:45 crc kubenswrapper[4838]: I0202 11:43:45.429343 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 02 11:43:45 crc kubenswrapper[4838]: I0202 11:43:45.429898 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 11:43:50 crc kubenswrapper[4838]: I0202 11:43:50.270896 4838 scope.go:117] "RemoveContainer" containerID="8034e4333d5090023a77b8f09981f0820913a210b4e53e58fa08aa208fcde901"
Feb 02 11:44:15 crc kubenswrapper[4838]: I0202 11:44:15.430443 4838 patch_prober.go:28] interesting pod/machine-config-daemon-n7ctv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 02 11:44:15 crc kubenswrapper[4838]: I0202 11:44:15.430894 4838 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 11:44:15 crc kubenswrapper[4838]: I0202 11:44:15.430938 4838 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv"
Feb 02 11:44:15 crc kubenswrapper[4838]: I0202 11:44:15.431658 4838 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"50f5ae09aeebc172928bc46b6909f71e360f73d5ed00ab1cdfd659a8d6bf2c78"} pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 02 11:44:15 crc kubenswrapper[4838]: I0202 11:44:15.431710 4838 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerName="machine-config-daemon" containerID="cri-o://50f5ae09aeebc172928bc46b6909f71e360f73d5ed00ab1cdfd659a8d6bf2c78" gracePeriod=600
Feb 02 11:44:15 crc kubenswrapper[4838]: E0202 11:44:15.555570 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce"
Feb 02 11:44:16 crc kubenswrapper[4838]: I0202 11:44:16.462004 4838 generic.go:334] "Generic (PLEG): container finished" podID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" containerID="50f5ae09aeebc172928bc46b6909f71e360f73d5ed00ab1cdfd659a8d6bf2c78" exitCode=0
Feb 02 11:44:16 crc kubenswrapper[4838]: I0202 11:44:16.462076 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" event={"ID":"c1e0f2bd-7afa-44f4-a3cb-cad88c063dce","Type":"ContainerDied","Data":"50f5ae09aeebc172928bc46b6909f71e360f73d5ed00ab1cdfd659a8d6bf2c78"}
Feb 02 11:44:16 crc kubenswrapper[4838]: I0202 11:44:16.462872 4838 scope.go:117] "RemoveContainer" containerID="c9402ad06463da91e1442dcbaf0f24edbf39af95cfe72e53a949140dfa1034a3"
Feb 02 11:44:16 crc kubenswrapper[4838]: I0202 11:44:16.463754 4838 scope.go:117] "RemoveContainer" containerID="50f5ae09aeebc172928bc46b6909f71e360f73d5ed00ab1cdfd659a8d6bf2c78"
Feb 02 11:44:16 crc kubenswrapper[4838]: E0202 11:44:16.464351 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce"
Feb 02 11:44:27 crc kubenswrapper[4838]: I0202 11:44:27.505743 4838 scope.go:117] "RemoveContainer" containerID="50f5ae09aeebc172928bc46b6909f71e360f73d5ed00ab1cdfd659a8d6bf2c78"
Feb 02 11:44:27 crc kubenswrapper[4838]: E0202 11:44:27.507763 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce"
Feb 02 11:44:40 crc kubenswrapper[4838]: I0202 11:44:40.506297 4838 scope.go:117] "RemoveContainer" containerID="50f5ae09aeebc172928bc46b6909f71e360f73d5ed00ab1cdfd659a8d6bf2c78"
Feb 02 11:44:40 crc kubenswrapper[4838]: E0202 11:44:40.507138 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce"
Feb 02 11:44:51 crc kubenswrapper[4838]: I0202 11:44:51.506126 4838 scope.go:117] "RemoveContainer" containerID="50f5ae09aeebc172928bc46b6909f71e360f73d5ed00ab1cdfd659a8d6bf2c78"
Feb 02 11:44:51 crc kubenswrapper[4838]: E0202 11:44:51.507193 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce"
Feb 02 11:45:00 crc kubenswrapper[4838]: I0202 11:45:00.164977 4838 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500545-5fjsw"]
Feb 02 11:45:00 crc kubenswrapper[4838]: E0202 11:45:00.166314 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddff32a7-e027-4d2e-b77f-c7053ae36c59" containerName="gather"
Feb 02 11:45:00 crc kubenswrapper[4838]: I0202 11:45:00.166350 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddff32a7-e027-4d2e-b77f-c7053ae36c59" containerName="gather"
Feb 02 11:45:00 crc kubenswrapper[4838]: E0202 11:45:00.166370 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68bafdd1-d2de-4cd7-8541-1ac5d0e49de7" containerName="extract-content"
Feb 02 11:45:00 crc kubenswrapper[4838]: I0202 11:45:00.166378 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="68bafdd1-d2de-4cd7-8541-1ac5d0e49de7" containerName="extract-content"
Feb 02 11:45:00 crc kubenswrapper[4838]: E0202 11:45:00.166386 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68bafdd1-d2de-4cd7-8541-1ac5d0e49de7" containerName="registry-server"
Feb 02 11:45:00 crc kubenswrapper[4838]: I0202 11:45:00.166393 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="68bafdd1-d2de-4cd7-8541-1ac5d0e49de7" containerName="registry-server"
Feb 02 11:45:00 crc kubenswrapper[4838]: E0202 11:45:00.166415 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddff32a7-e027-4d2e-b77f-c7053ae36c59" containerName="copy"
Feb 02 11:45:00 crc kubenswrapper[4838]: I0202 11:45:00.166422 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddff32a7-e027-4d2e-b77f-c7053ae36c59" containerName="copy"
Feb 02 11:45:00 crc kubenswrapper[4838]: E0202 11:45:00.166433 4838 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68bafdd1-d2de-4cd7-8541-1ac5d0e49de7" containerName="extract-utilities"
Feb 02 11:45:00 crc kubenswrapper[4838]: I0202 11:45:00.166440 4838 state_mem.go:107] "Deleted CPUSet assignment" podUID="68bafdd1-d2de-4cd7-8541-1ac5d0e49de7" containerName="extract-utilities"
Feb 02 11:45:00 crc kubenswrapper[4838]: I0202 11:45:00.166675 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddff32a7-e027-4d2e-b77f-c7053ae36c59" containerName="copy"
Feb 02 11:45:00 crc kubenswrapper[4838]: I0202 11:45:00.166693 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="68bafdd1-d2de-4cd7-8541-1ac5d0e49de7" containerName="registry-server"
Feb 02 11:45:00 crc kubenswrapper[4838]: I0202 11:45:00.166713 4838 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddff32a7-e027-4d2e-b77f-c7053ae36c59" containerName="gather"
Feb 02 11:45:00 crc kubenswrapper[4838]: I0202 11:45:00.167539 4838 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500545-5fjsw"
Feb 02 11:45:00 crc kubenswrapper[4838]: I0202 11:45:00.177664 4838 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Feb 02 11:45:00 crc kubenswrapper[4838]: I0202 11:45:00.179555 4838 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Feb 02 11:45:00 crc kubenswrapper[4838]: I0202 11:45:00.196267 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500545-5fjsw"]
Feb 02 11:45:00 crc kubenswrapper[4838]: I0202 11:45:00.212577 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7n7cv\" (UniqueName: \"kubernetes.io/projected/db915b4a-35e2-43d4-abcf-bcdb0342e7f2-kube-api-access-7n7cv\") pod \"collect-profiles-29500545-5fjsw\" (UID: \"db915b4a-35e2-43d4-abcf-bcdb0342e7f2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500545-5fjsw"
Feb 02 11:45:00 crc kubenswrapper[4838]: I0202 11:45:00.212724 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/db915b4a-35e2-43d4-abcf-bcdb0342e7f2-secret-volume\") pod \"collect-profiles-29500545-5fjsw\" (UID: \"db915b4a-35e2-43d4-abcf-bcdb0342e7f2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500545-5fjsw"
Feb 02 11:45:00 crc kubenswrapper[4838]: I0202 11:45:00.212847 4838 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/db915b4a-35e2-43d4-abcf-bcdb0342e7f2-config-volume\") pod \"collect-profiles-29500545-5fjsw\" (UID: \"db915b4a-35e2-43d4-abcf-bcdb0342e7f2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500545-5fjsw"
Feb 02 11:45:00 crc kubenswrapper[4838]: I0202 11:45:00.315526 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7n7cv\" (UniqueName: \"kubernetes.io/projected/db915b4a-35e2-43d4-abcf-bcdb0342e7f2-kube-api-access-7n7cv\") pod \"collect-profiles-29500545-5fjsw\" (UID: \"db915b4a-35e2-43d4-abcf-bcdb0342e7f2\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29500545-5fjsw" Feb 02 11:45:00 crc kubenswrapper[4838]: I0202 11:45:00.315636 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/db915b4a-35e2-43d4-abcf-bcdb0342e7f2-secret-volume\") pod \"collect-profiles-29500545-5fjsw\" (UID: \"db915b4a-35e2-43d4-abcf-bcdb0342e7f2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500545-5fjsw" Feb 02 11:45:00 crc kubenswrapper[4838]: I0202 11:45:00.315699 4838 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/db915b4a-35e2-43d4-abcf-bcdb0342e7f2-config-volume\") pod \"collect-profiles-29500545-5fjsw\" (UID: \"db915b4a-35e2-43d4-abcf-bcdb0342e7f2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500545-5fjsw" Feb 02 11:45:00 crc kubenswrapper[4838]: I0202 11:45:00.317014 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/db915b4a-35e2-43d4-abcf-bcdb0342e7f2-config-volume\") pod \"collect-profiles-29500545-5fjsw\" (UID: \"db915b4a-35e2-43d4-abcf-bcdb0342e7f2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500545-5fjsw" Feb 02 11:45:00 crc kubenswrapper[4838]: I0202 11:45:00.328451 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/db915b4a-35e2-43d4-abcf-bcdb0342e7f2-secret-volume\") pod \"collect-profiles-29500545-5fjsw\" (UID: \"db915b4a-35e2-43d4-abcf-bcdb0342e7f2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500545-5fjsw" Feb 02 11:45:00 crc kubenswrapper[4838]: I0202 11:45:00.335157 4838 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7n7cv\" (UniqueName: \"kubernetes.io/projected/db915b4a-35e2-43d4-abcf-bcdb0342e7f2-kube-api-access-7n7cv\") pod \"collect-profiles-29500545-5fjsw\" (UID: \"db915b4a-35e2-43d4-abcf-bcdb0342e7f2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500545-5fjsw" Feb 02 11:45:00 crc kubenswrapper[4838]: I0202 11:45:00.510635 4838 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500545-5fjsw" Feb 02 11:45:00 crc kubenswrapper[4838]: I0202 11:45:00.962826 4838 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500545-5fjsw"] Feb 02 11:45:01 crc kubenswrapper[4838]: E0202 11:45:01.579053 4838 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddb915b4a_35e2_43d4_abcf_bcdb0342e7f2.slice/crio-conmon-0fb5d7597426a06d1f9d064023ecec34818971eedbf391f21f84c9f349d582f1.scope\": RecentStats: unable to find data in memory cache]" Feb 02 11:45:01 crc kubenswrapper[4838]: I0202 11:45:01.923107 4838 generic.go:334] "Generic (PLEG): container finished" podID="db915b4a-35e2-43d4-abcf-bcdb0342e7f2" containerID="0fb5d7597426a06d1f9d064023ecec34818971eedbf391f21f84c9f349d582f1" exitCode=0 Feb 02 11:45:01 crc kubenswrapper[4838]: I0202 11:45:01.923179 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500545-5fjsw" event={"ID":"db915b4a-35e2-43d4-abcf-bcdb0342e7f2","Type":"ContainerDied","Data":"0fb5d7597426a06d1f9d064023ecec34818971eedbf391f21f84c9f349d582f1"} Feb 02 11:45:01 crc kubenswrapper[4838]: I0202 11:45:01.923230 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500545-5fjsw" event={"ID":"db915b4a-35e2-43d4-abcf-bcdb0342e7f2","Type":"ContainerStarted","Data":"16d511ea3d8729d80df9ed2384964942d83a13a741832e01eac2f75ea95cd85d"} Feb 02 11:45:02 crc kubenswrapper[4838]: I0202 11:45:02.506209 4838 scope.go:117] "RemoveContainer" containerID="50f5ae09aeebc172928bc46b6909f71e360f73d5ed00ab1cdfd659a8d6bf2c78" Feb 02 11:45:02 crc kubenswrapper[4838]: E0202 11:45:02.506796 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" Feb 02 11:45:03 crc kubenswrapper[4838]: I0202 11:45:03.306049 4838 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500545-5fjsw" Feb 02 11:45:03 crc kubenswrapper[4838]: I0202 11:45:03.483021 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/db915b4a-35e2-43d4-abcf-bcdb0342e7f2-secret-volume\") pod \"db915b4a-35e2-43d4-abcf-bcdb0342e7f2\" (UID: \"db915b4a-35e2-43d4-abcf-bcdb0342e7f2\") " Feb 02 11:45:03 crc kubenswrapper[4838]: I0202 11:45:03.483098 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/db915b4a-35e2-43d4-abcf-bcdb0342e7f2-config-volume\") pod \"db915b4a-35e2-43d4-abcf-bcdb0342e7f2\" (UID: \"db915b4a-35e2-43d4-abcf-bcdb0342e7f2\") " Feb 02 11:45:03 crc kubenswrapper[4838]: I0202 11:45:03.483125 4838 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7n7cv\" (UniqueName: \"kubernetes.io/projected/db915b4a-35e2-43d4-abcf-bcdb0342e7f2-kube-api-access-7n7cv\") pod \"db915b4a-35e2-43d4-abcf-bcdb0342e7f2\" (UID: \"db915b4a-35e2-43d4-abcf-bcdb0342e7f2\") " Feb 02 11:45:03 crc kubenswrapper[4838]: I0202 11:45:03.485306 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db915b4a-35e2-43d4-abcf-bcdb0342e7f2-config-volume" (OuterVolumeSpecName: "config-volume") pod "db915b4a-35e2-43d4-abcf-bcdb0342e7f2" (UID: "db915b4a-35e2-43d4-abcf-bcdb0342e7f2"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 11:45:03 crc kubenswrapper[4838]: I0202 11:45:03.487890 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db915b4a-35e2-43d4-abcf-bcdb0342e7f2-kube-api-access-7n7cv" (OuterVolumeSpecName: "kube-api-access-7n7cv") pod "db915b4a-35e2-43d4-abcf-bcdb0342e7f2" (UID: "db915b4a-35e2-43d4-abcf-bcdb0342e7f2"). InnerVolumeSpecName "kube-api-access-7n7cv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 11:45:03 crc kubenswrapper[4838]: I0202 11:45:03.489511 4838 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db915b4a-35e2-43d4-abcf-bcdb0342e7f2-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "db915b4a-35e2-43d4-abcf-bcdb0342e7f2" (UID: "db915b4a-35e2-43d4-abcf-bcdb0342e7f2"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 11:45:03 crc kubenswrapper[4838]: I0202 11:45:03.586291 4838 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/db915b4a-35e2-43d4-abcf-bcdb0342e7f2-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 02 11:45:03 crc kubenswrapper[4838]: I0202 11:45:03.586342 4838 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/db915b4a-35e2-43d4-abcf-bcdb0342e7f2-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 11:45:03 crc kubenswrapper[4838]: I0202 11:45:03.586360 4838 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7n7cv\" (UniqueName: \"kubernetes.io/projected/db915b4a-35e2-43d4-abcf-bcdb0342e7f2-kube-api-access-7n7cv\") on node \"crc\" DevicePath \"\"" Feb 02 11:45:03 crc kubenswrapper[4838]: I0202 11:45:03.946889 4838 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500545-5fjsw" event={"ID":"db915b4a-35e2-43d4-abcf-bcdb0342e7f2","Type":"ContainerDied","Data":"16d511ea3d8729d80df9ed2384964942d83a13a741832e01eac2f75ea95cd85d"} Feb 02 11:45:03 crc kubenswrapper[4838]: I0202 11:45:03.946940 4838 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="16d511ea3d8729d80df9ed2384964942d83a13a741832e01eac2f75ea95cd85d" Feb 02 11:45:03 crc kubenswrapper[4838]: I0202 11:45:03.947007 4838 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500545-5fjsw" Feb 02 11:45:04 crc kubenswrapper[4838]: I0202 11:45:04.381261 4838 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500500-msftz"] Feb 02 11:45:04 crc kubenswrapper[4838]: I0202 11:45:04.391030 4838 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500500-msftz"] Feb 02 11:45:04 crc kubenswrapper[4838]: I0202 11:45:04.520703 4838 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05d7eca8-60b7-46dc-bdf0-510d63e525e6" path="/var/lib/kubelet/pods/05d7eca8-60b7-46dc-bdf0-510d63e525e6/volumes" Feb 02 11:45:17 crc kubenswrapper[4838]: I0202 11:45:17.506727 4838 scope.go:117] "RemoveContainer" containerID="50f5ae09aeebc172928bc46b6909f71e360f73d5ed00ab1cdfd659a8d6bf2c78" Feb 02 11:45:17 crc kubenswrapper[4838]: E0202 11:45:17.507765 4838 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-n7ctv_openshift-machine-config-operator(c1e0f2bd-7afa-44f4-a3cb-cad88c063dce)\"" pod="openshift-machine-config-operator/machine-config-daemon-n7ctv" podUID="c1e0f2bd-7afa-44f4-a3cb-cad88c063dce" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515140107134024441 0ustar coreroot  Om77'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015140107135017357 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015140100547016502 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015140100550015444 5ustar corecore